diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml
index 4a49d458c1b..1cb6dade030 100644
--- a/hapi-deployable-pom/pom.xml
+++ b/hapi-deployable-pom/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml
index 7f35ad22728..0efd9f8ca41 100644
--- a/hapi-fhir-android/pom.xml
+++ b/hapi-fhir-android/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml
index 4fe0c871f41..28cb9278b5b 100644
--- a/hapi-fhir-base/pom.xml
+++ b/hapi-fhir-base/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
index f66e24024de..f8aff921214 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
@@ -1101,10 +1101,47 @@ public enum Pointcut implements IPointcut {
*/
STORAGE_INITIATE_BULK_EXPORT(
void.class,
- "ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions",
+ "ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
+
+
+ /**
+ * Storage Hook:
+ * Invoked when a Bulk Export job is being processed. If any hook method is registered
+ * for this pointcut, the hook method will be called once for each resource that is
+ * loaded for inclusion in a bulk export file. Hook methods may modify
+ * the resource object and this modification will affect the copy that is stored in the
+ * bulk export data file (but will not affect the original). Hook methods may also
+ * return <code>false</code> in order to request that the resource be filtered
+ * from the export.
+ *
+ * Hooks may accept the following parameters:
+ *
+ *
+ * -
+ * ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters - The details of the job being kicked off
+ *
+ * -
+ *org.hl7.fhir.instance.model.api.IBaseResource - The resource that will be included in the file
+ *
+ *
+ *
+ * Hook methods may return <code>false</code> to indicate that the resource should be
+ * filtered out. Otherwise, hook methods should return <code>true</code>.
+ *
+ *
+ * @since 6.8.0
+ */
+ STORAGE_BULK_EXPORT_RESOURCE_INCLUSION(
+ boolean.class,
+ "ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters",
+ "org.hl7.fhir.instance.model.api.IBaseResource"
+ ),
+
+
+
/**
* Storage Hook:
* Invoked when a set of resources are about to be deleted and expunged via url like http://localhost/Patient?active=false&_expunge=true
diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml
index bb11b3b1b72..50e8712adcd 100644
--- a/hapi-fhir-bom/pom.xml
+++ b/hapi-fhir-bom/pom.xml
@@ -4,7 +4,7 @@
4.0.0
ca.uhn.hapi.fhir
hapi-fhir-bom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
pom
HAPI FHIR BOM
@@ -12,7 +12,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml
index 58414fba4a3..22f5f082e30 100644
--- a/hapi-fhir-checkstyle/pom.xml
+++ b/hapi-fhir-checkstyle/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
index 98fbf8c4e19..19bf9659f3d 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
index 1999f2de722..81eee9f6772 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-fhir-cli
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml
index 5f5c3be5933..da5d153d45c 100644
--- a/hapi-fhir-cli/pom.xml
+++ b/hapi-fhir-cli/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml
index 9a18f892ecb..763719d6bd7 100644
--- a/hapi-fhir-client-okhttp/pom.xml
+++ b/hapi-fhir-client-okhttp/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml
index 55336e9422f..755417b00fb 100644
--- a/hapi-fhir-client/pom.xml
+++ b/hapi-fhir-client/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml
index 42e74fbdc29..62f1628c37f 100644
--- a/hapi-fhir-converter/pom.xml
+++ b/hapi-fhir-converter/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml
index 07b32c66870..d9d7469beca 100644
--- a/hapi-fhir-dist/pom.xml
+++ b/hapi-fhir-dist/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml
index 0ba709cbfb0..9cc90e735dc 100644
--- a/hapi-fhir-docs/pom.xml
+++ b/hapi-fhir-docs/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5039-add-bulk-export-pointcut.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5039-add-bulk-export-pointcut.yaml
new file mode 100644
index 00000000000..71a8f6e89f6
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5039-add-bulk-export-pointcut.yaml
@@ -0,0 +1,6 @@
+---
+type: change
+issue: 5039
+title: "A new interceptor pointcut called `STORAGE_BULK_EXPORT_RESOURCE_INCLUSION` has been added. This
+ pointcut is called for every resource that will be included in a bulk export data file. Hook methods
+ may modify the resources or signal that they should be filtered out."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5039-bulk-export-partitioning-improvement.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5039-bulk-export-partitioning-improvement.yaml
new file mode 100644
index 00000000000..11ee55c6ea4
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5039-bulk-export-partitioning-improvement.yaml
@@ -0,0 +1,5 @@
+---
+type: change
+issue: 5039
+title: "Bulk export fetch operations now properly account for the partition being searched, which
+ should improve performance in some cases on partitioned systems."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5039-rename-bulkdataexportoptions.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5039-rename-bulkdataexportoptions.yaml
new file mode 100644
index 00000000000..c9232a50591
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_8_0/5039-rename-bulkdataexportoptions.yaml
@@ -0,0 +1,7 @@
+---
+type: change
+issue: 5039
+title: "The class `BulkDataExportOptions` has been removed and replaced with an equivalent class called
+ `BulkExportJobParameters`. This is unfortunately a breaking change, but it should be easily remedied
+ as the replacement class has a very similar signature. This change reduces a large amount of duplication
+ in the bulk export codebase and should make for more maintainable code in the future."
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml
index 202953a5de7..868f234c188 100644
--- a/hapi-fhir-jacoco/pom.xml
+++ b/hapi-fhir-jacoco/pom.xml
@@ -11,7 +11,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml
index 58034af7af4..96ec64644b8 100644
--- a/hapi-fhir-jaxrsserver-base/pom.xml
+++ b/hapi-fhir-jaxrsserver-base/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml
index 841e628a215..28051fcafc3 100644
--- a/hapi-fhir-jpa/pom.xml
+++ b/hapi-fhir-jpa/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/INicknameSvc.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/INicknameSvc.java
index fce906290e9..bfa5a73e00b 100644
--- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/INicknameSvc.java
+++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/INicknameSvc.java
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * hapi-fhir-jpa
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
package ca.uhn.fhir.jpa.nickname;
import org.springframework.core.io.Resource;
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index 5857335d415..143d26279da 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java
index f5b5c784201..4532062cdee 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java
@@ -48,7 +48,7 @@ import ca.uhn.fhir.mdm.model.MdmPidTuple;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.param.HasOrListParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
@@ -125,7 +125,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor {
@Override
public Iterator getResourcePidIterator(ExportPIDIteratorParameters theParams) {
return myHapiTransactionService
- .withRequest(null)
+ .withSystemRequest()
+ .withRequestPartitionId(theParams.getPartitionIdOrAllPartitions())
.readOnly()
.execute(() -> {
String resourceType = theParams.getResourceType();
@@ -134,9 +135,9 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor {
RuntimeResourceDefinition def = myContext.getResourceDefinition(resourceType);
LinkedHashSet pids;
- if (theParams.getExportStyle() == BulkDataExportOptions.ExportStyle.PATIENT) {
+ if (theParams.getExportStyle() == BulkExportJobParameters.ExportStyle.PATIENT) {
pids = getPidsForPatientStyleExport(theParams, resourceType, jobId, chunkId, def);
- } else if (theParams.getExportStyle() == BulkDataExportOptions.ExportStyle.GROUP) {
+ } else if (theParams.getExportStyle() == BulkExportJobParameters.ExportStyle.GROUP) {
pids = getPidsForGroupStyleExport(theParams, resourceType, def);
} else {
pids = getPidsForSystemStyleExport(theParams, jobId, chunkId, def);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
index 5fa3044f0c3..7681d957657 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
@@ -37,7 +37,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchUrlJobMaintenanceSvc;
import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportJobSchedulingHelperImpl;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportHelperService;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java
deleted file mode 100644
index 3b7cf5fa5dc..00000000000
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.jpa.dao.data;
-
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
-import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
-import org.springframework.data.domain.Pageable;
-import org.springframework.data.domain.Slice;
-import org.springframework.data.jpa.repository.JpaRepository;
-import org.springframework.data.jpa.repository.Modifying;
-import org.springframework.data.jpa.repository.Query;
-import org.springframework.data.repository.query.Param;
-
-import java.util.Date;
-import java.util.Optional;
-
-public interface IBulkExportJobDao extends JpaRepository, IHapiFhirJpaRepository {
-
- @Modifying
- @Query("DELETE FROM BulkExportJobEntity t WHERE t.myId = :pid")
- void deleteByPid(@Param("pid") Long theId);
-}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java
index 0f9caeac028..9271f5247bf 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java
@@ -19,8 +19,6 @@
*/
package ca.uhn.fhir.jpa.entity;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
-import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.r5.model.InstantType;
@@ -80,9 +78,8 @@ public class BulkExportJobEntity implements Serializable {
@Column(name = JOB_ID, length = UUID_LENGTH, nullable = false)
private String myJobId;
- @Enumerated(EnumType.STRING)
@Column(name = "JOB_STATUS", length = 10, nullable = false)
- private BulkExportJobStatusEnum myStatus;
+ private String myStatus;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "CREATED_TIME", nullable = false)
private Date myCreated;
@@ -168,11 +165,11 @@ public class BulkExportJobEntity implements Serializable {
return b.toString();
}
- public BulkExportJobStatusEnum getStatus() {
+ public String getStatus() {
return myStatus;
}
- public void setStatus(BulkExportJobStatusEnum theStatus) {
+ public void setStatus(String theStatus) {
if (myStatus != theStatus) {
myStatusTime = new Date();
myStatus = theStatus;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java
index 91ea49776b2..7631aa96451 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java
@@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.jpa.packages;
-import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
-import ca.uhn.fhir.jpa.util.JsonDateSerializer;
+import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
+import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/Batch2DaoSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/Batch2DaoSvcImpl.java
index 9758f5c87ab..f802f8235a5 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/Batch2DaoSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/Batch2DaoSvcImpl.java
@@ -32,25 +32,25 @@ import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
-import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.util.DateRangeUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
-import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
+
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
public class Batch2DaoSvcImpl implements IBatch2DaoSvc {
@@ -80,7 +80,7 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc {
return myTransactionService
.withSystemRequest()
.withRequestPartitionId(theRequestPartitionId)
- .execute(()->{
+ .execute(() -> {
if (theUrl == null) {
return fetchResourceIdsPageNoUrl(theStart, theEnd, thePageSize, theRequestPartitionId);
} else {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessorTest.java
index 76411e19c55..0382404fb38 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessorTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessorTest.java
@@ -25,7 +25,7 @@ import ca.uhn.fhir.mdm.model.MdmPidTuple;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Group;
@@ -145,11 +145,11 @@ public class JpaBulkExportProcessorTest {
@InjectMocks
private JpaBulkExportProcessor myProcessor;
- private ExportPIDIteratorParameters createExportParameters(BulkDataExportOptions.ExportStyle theExportStyle) {
+ private ExportPIDIteratorParameters createExportParameters(BulkExportJobParameters.ExportStyle theExportStyle) {
ExportPIDIteratorParameters parameters = new ExportPIDIteratorParameters();
parameters.setInstanceId("instanceId");
parameters.setExportStyle(theExportStyle);
- if (theExportStyle == BulkDataExportOptions.ExportStyle.GROUP) {
+ if (theExportStyle == BulkExportJobParameters.ExportStyle.GROUP) {
parameters.setGroupId("123");
}
parameters.setStartDate(new Date());
@@ -176,7 +176,7 @@ public class JpaBulkExportProcessorTest {
@ValueSource(booleans = {true, false})
public void getResourcePidIterator_paramsWithPatientExportStyle_returnsAnIterator(boolean thePartitioned) {
// setup
- ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.PATIENT);
+ ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.PATIENT);
parameters.setResourceType("Patient");
parameters.setPartitionId(getPartitionIdFromParams(thePartitioned));
@@ -242,7 +242,7 @@ public class JpaBulkExportProcessorTest {
@Test
public void getResourcePidIterator_patientStyleWithIndexMissingFieldsDisabled_throws() {
// setup
- ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.PATIENT);
+ ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.PATIENT);
parameters.setResourceType("Patient");
// when
@@ -262,7 +262,7 @@ public class JpaBulkExportProcessorTest {
@CsvSource({"false, false", "false, true", "true, true", "true, false"})
public void getResourcePidIterator_groupExportStyleWithPatientResource_returnsIterator(boolean theMdm, boolean thePartitioned) {
// setup
- ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.GROUP);
+ ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.GROUP);
parameters.setResourceType("Patient");
JpaPid groupId = JpaPid.fromId(Long.parseLong(parameters.getGroupId()));
@@ -370,7 +370,7 @@ public class JpaBulkExportProcessorTest {
@SuppressWarnings({"rawtypes", "unchecked"})
public void getResourcePidIterator_groupExportStyleWithNonPatientResource_returnsIterator(boolean theMdm, boolean thePartitioned) {
// setup
- ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.GROUP);
+ ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.GROUP);
parameters.setResourceType("Observation");
JpaPid groupId = JpaPid.fromId(Long.parseLong(parameters.getGroupId()));
@@ -484,7 +484,7 @@ public class JpaBulkExportProcessorTest {
@ValueSource(booleans = {true, false})
public void getResourcePidIterator_systemExport_returnsIterator(boolean thePartitioned) {
// setup
- ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.SYSTEM);
+ ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.SYSTEM);
parameters.setResourceType("Patient");
parameters.setPartitionId(getPartitionIdFromParams(thePartitioned));
@@ -540,7 +540,7 @@ public class JpaBulkExportProcessorTest {
@ValueSource(booleans = {true, false})
public void getResourcePidIterator_groupExportStyleWithGroupResource_returnsAnIterator(boolean thePartitioned) {
// setup
- ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.GROUP);
+ ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.GROUP);
parameters.setResourceType("Group");
parameters.setPartitionId(getPartitionIdFromParams(thePartitioned));
diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
index 6cb7bebe474..eb2bddd6851 100644
--- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/bulk/BulkGroupExportWithIndexedSearchParametersTest.java b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/bulk/BulkGroupExportWithIndexedSearchParametersTest.java
index 1d1c7e54144..e65146124e4 100644
--- a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/bulk/BulkGroupExportWithIndexedSearchParametersTest.java
+++ b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/bulk/BulkGroupExportWithIndexedSearchParametersTest.java
@@ -1,19 +1,19 @@
package ca.uhn.fhir.jpa.bulk;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.test.BaseJpaTest;
import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
-import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
+import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.JsonUtil;
import org.hl7.fhir.r4.model.Bundle;
-import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Meta;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -45,12 +45,13 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
public class BulkGroupExportWithIndexedSearchParametersTest extends BaseJpaTest {
private final FhirContext myCtx = FhirContext.forR4Cached();
- @Autowired private IBatch2JobRunner myJobRunner;
@Autowired private PlatformTransactionManager myTxManager;
@Autowired
@Qualifier("mySystemDaoR4")
protected IFhirSystemDao mySystemDao;
+ @Autowired
+ private IJobCoordinator myJobCoordinator;
@BeforeEach
void setUp() {
@@ -72,28 +73,32 @@ public class BulkGroupExportWithIndexedSearchParametersTest extends BaseJpaTest
mySystemDao.transaction(mySrd, inputBundle);
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Set.of("Patient", "Observation", "Group"));
- options.setGroupId(new IdType("Group", "G1"));
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setGroupId("Group/G1");
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
BulkExportJobResults jobResults = getBulkExportJobResults(options);
assertThat(jobResults.getResourceTypeToBinaryIds().keySet(), containsInAnyOrder("Patient", "Observation", "Group"));
}
- private BulkExportJobResults getBulkExportJobResults(BulkDataExportOptions theOptions) {
- Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions));
+ private BulkExportJobResults getBulkExportJobResults(BulkExportJobParameters theOptions) {
+ JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+ startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
+ startRequest.setParameters(theOptions);
+
+ Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
assertNotNull(startResponse);
// Run a scheduled pass to build the export
myBatch2JobHelper.awaitJobCompletion(startResponse.getInstanceId());
- await().until(() -> myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport() != null);
+ await().until(() -> myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport() != null);
// Iterate over the files
- String report = myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport();
+ String report = myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport();
return JsonUtil.deserialize(report, BulkExportJobResults.class);
}
diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml
index df7bb1a3034..192463f223a 100644
--- a/hapi-fhir-jpaserver-ips/pom.xml
+++ b/hapi-fhir-jpaserver-ips/pom.xml
@@ -3,7 +3,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml
index 3188bb0bb53..a7cd792d99b 100644
--- a/hapi-fhir-jpaserver-mdm/pom.xml
+++ b/hapi-fhir-jpaserver-mdm/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml
index ca9da3c82a9..30d4bbc8535 100644
--- a/hapi-fhir-jpaserver-model/pom.xml
+++ b/hapi-fhir-jpaserver-model/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml
index 7439a507544..d990497c790 100755
--- a/hapi-fhir-jpaserver-searchparam/pom.xml
+++ b/hapi-fhir-jpaserver-searchparam/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml
index 34b6d752c56..52278f93941 100644
--- a/hapi-fhir-jpaserver-subscription/pom.xml
+++ b/hapi-fhir-jpaserver-subscription/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml
index 762a9fb0ae4..7d9336e47d9 100644
--- a/hapi-fhir-jpaserver-test-dstu2/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml
index b3539fbf087..7866d1fa40d 100644
--- a/hapi-fhir-jpaserver-test-dstu3/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml
index d856255b5e9..550c85e0885 100644
--- a/hapi-fhir-jpaserver-test-r4/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/BulkDataErrorAbuseTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/BulkDataErrorAbuseTest.java
index 91461742c67..b8b16b4670f 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/BulkDataErrorAbuseTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/BulkDataErrorAbuseTest.java
@@ -1,17 +1,17 @@
package ca.uhn.fhir.jpa.batch2;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
-import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
-import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.JsonUtil;
import com.google.common.collect.Sets;
import org.hl7.fhir.r4.model.Binary;
@@ -34,7 +34,6 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Objects;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
@@ -61,7 +60,7 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataErrorAbuseTest.class);
@Autowired
- private IBatch2JobRunner myJobRunner;
+ private IJobCoordinator myJobCoordinator;
@BeforeEach
void beforeEach() {
@@ -122,11 +121,11 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
myClient.update().resource(group).execute();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient"));
- options.setGroupId(new IdType("Group", "G2"));
+ options.setGroupId("Group/G2");
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
BlockingQueue workQueue = new LinkedBlockingQueue<>();
@@ -184,7 +183,7 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
private void verifyBulkExportResults(String theInstanceId, List theContainedList, List theExcludedList) {
// Iterate over the files
- Batch2JobInfo jobInfo = myJobRunner.getJobInfo(theInstanceId);
+ JobInstance jobInfo = myJobCoordinator.getInstance(theInstanceId);
String report = jobInfo.getReport();
ourLog.debug("Export job {} report: {}", theInstanceId, report);
if (!theContainedList.isEmpty()) {
@@ -237,10 +236,12 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
ourLog.info("Job {} ok", theInstanceId);
}
- private String startJob(BulkDataExportOptions theOptions) {
- BulkExportParameters startRequest = BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions);
- startRequest.setUseExistingJobsFirst(false);
- Batch2JobStartResponse startResponse = myJobRunner.startNewJob(null, startRequest);
+ private String startJob(BulkExportJobParameters theOptions) {
+ JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+ startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
+ startRequest.setUseCache(false);
+ startRequest.setParameters(theOptions);
+ Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
assertNotNull(startResponse);
return startResponse.getInstanceId();
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java
index 3d5261b60c1..4b354894d7d 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java
@@ -1,25 +1,25 @@
package ca.uhn.fhir.jpa.bulk;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.api.JobOperationResultJson;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
+import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
-import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
-import ca.uhn.fhir.jpa.api.model.Batch2JobOperationResult;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
-import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.rest.server.HardcodedServerAddressStrategy;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
@@ -66,15 +66,16 @@ import java.util.Set;
import java.util.stream.Stream;
import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
@@ -82,7 +83,6 @@ import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.isNotNull;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.lenient;
-import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@@ -98,10 +98,13 @@ public class BulkDataExportProviderTest {
private final FhirContext myCtx = FhirContext.forR4Cached();
@RegisterExtension
private final HttpClientExtension myClient = new HttpClientExtension();
- @Mock
- private IBatch2JobRunner myJobRunner;
+ private final RequestPartitionId myRequestPartitionId = RequestPartitionId.fromPartitionIdAndName(123, "Partition-A");
+ private final String myPartitionName = "Partition-A";
+ private final String myFixedBaseUrl = "http:/myfixedbaseurl.com";
@Mock
IFhirResourceDao myFhirResourceDao;
+ @Mock
+ IJobCoordinator myJobCoordinator;
@InjectMocks
private BulkDataExportProvider myProvider;
@RegisterExtension
@@ -109,44 +112,18 @@ public class BulkDataExportProviderTest {
.withServer(s -> s.registerProvider(myProvider));
@Spy
private RequestPartitionHelperSvc myRequestPartitionHelperSvc = new MyRequestPartitionHelperSvc();
-
private JpaStorageSettings myStorageSettings;
-
- private final RequestPartitionId myRequestPartitionId = RequestPartitionId.fromPartitionIdAndName(123, "Partition-A");
-
- private final String myPartitionName = "Partition-A";
-
- private final String myFixedBaseUrl = "http:/myfixedbaseurl.com";
-
- private class MyRequestPartitionHelperSvc extends RequestPartitionHelperSvc {
- @Override
- public @NotNull RequestPartitionId determineReadPartitionForRequest(RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails) {
- assert theRequest != null;
- if (myPartitionName.equals(theRequest.getTenantId())) {
- return myRequestPartitionId;
- } else {
- return RequestPartitionId.fromPartitionName(theRequest.getTenantId());
- }
- }
-
- @Override
- public void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) {
- if (!myPartitionName.equals(theRequest.getTenantId()) && theRequest.getTenantId() != null) {
- throw new ForbiddenOperationException("User does not have access to resources on the requested partition");
- }
- }
-
- }
+ @Mock
+ private DaoRegistry myDaoRegistry;
@BeforeEach
public void injectStorageSettings() {
myStorageSettings = new JpaStorageSettings();
myProvider.setStorageSettings(myStorageSettings);
- DaoRegistry daoRegistry = mock(DaoRegistry.class);
- lenient().when(daoRegistry.getRegisteredDaoTypes()).thenReturn(Set.of("Patient", "Observation", "Encounter"));
+ lenient().when(myDaoRegistry.getRegisteredDaoTypes()).thenReturn(Set.of("Patient", "Observation", "Encounter", "Group", "Device", "DiagnosticReport"));
- lenient().when(daoRegistry.getResourceDao(anyString())).thenReturn(myFhirResourceDao);
- myProvider.setDaoRegistry(daoRegistry);
+ lenient().when(myDaoRegistry.getResourceDao(anyString())).thenReturn(myFhirResourceDao);
+ myProvider.setDaoRegistry(myDaoRegistry);
}
@@ -159,12 +136,15 @@ public class BulkDataExportProviderTest {
myServer.getRestfulServer().setTenantIdentificationStrategy(new UrlBaseTenantIdentificationStrategy());
}
- private BulkExportParameters verifyJobStart() {
- ArgumentCaptor startJobCaptor = ArgumentCaptor.forClass(Batch2BaseJobParameters.class);
- verify(myJobRunner).startNewJob(isNotNull(), startJobCaptor.capture());
- Batch2BaseJobParameters sp = startJobCaptor.getValue();
- assertTrue(sp instanceof BulkExportParameters);
- return (BulkExportParameters) sp;
+ private JobInstanceStartRequest verifyJobStart() {
+ ArgumentCaptor startJobCaptor = ArgumentCaptor.forClass(JobInstanceStartRequest.class);
+ verify(myJobCoordinator).startInstance(isNotNull(), startJobCaptor.capture());
+ JobInstanceStartRequest sp = startJobCaptor.getValue();
+ return sp;
+ }
+
+ private BulkExportJobParameters verifyJobStartAndReturnParameters() {
+ return verifyJobStart().getParameters(BulkExportJobParameters.class);
}
private Batch2JobStartResponse createJobStartResponse(String theJobId) {
@@ -198,8 +178,7 @@ public class BulkDataExportProviderTest {
String practitionerResource = "Practitioner";
String filter = "Patient?identifier=foo";
String postFetchFilter = "Patient?_tag=foo";
- when(myJobRunner.startNewJob(isNotNull(), any()))
- .thenReturn(createJobStartResponse());
+ when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse());
InstantType now = InstantType.now();
@@ -229,7 +208,7 @@ public class BulkDataExportProviderTest {
baseUrl = myServer.getBaseUrl();
}
- if(partitioningEnabled) {
+ if (partitioningEnabled) {
baseUrl = baseUrl + "/" + myPartitionName;
}
@@ -238,7 +217,7 @@ public class BulkDataExportProviderTest {
assertEquals(baseUrl + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
- BulkExportParameters params = verifyJobStart();
+ BulkExportJobParameters params = verifyJobStartAndReturnParameters();
assertEquals(2, params.getResourceTypes().size());
assertTrue(params.getResourceTypes().contains(patientResource));
assertTrue(params.getResourceTypes().contains(practitionerResource));
@@ -250,7 +229,7 @@ public class BulkDataExportProviderTest {
@Test
public void testOmittingOutputFormatDefaultsToNdjson() throws IOException {
- when(myJobRunner.startNewJob(isNotNull(), any()))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(createJobStartResponse());
Parameters input = new Parameters();
@@ -262,7 +241,7 @@ public class BulkDataExportProviderTest {
assertEquals(202, response.getStatusLine().getStatusCode());
}
- BulkExportParameters params = verifyJobStart();
+ BulkExportJobParameters params = verifyJobStartAndReturnParameters();
assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
@@ -271,7 +250,7 @@ public class BulkDataExportProviderTest {
@ParameterizedTest
@MethodSource("paramsProvider")
public void testSuccessfulInitiateBulkRequest_GetWithPartitioning(boolean partitioningEnabled) throws IOException {
- when(myJobRunner.startNewJob(isNotNull(), any())).thenReturn(createJobStartResponse());
+ when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse());
InstantType now = InstantType.now();
@@ -299,7 +278,7 @@ public class BulkDataExportProviderTest {
assertEquals(myBaseUrl + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
- BulkExportParameters params = verifyJobStart();
+ BulkExportJobParameters params = verifyJobStartAndReturnParameters();
assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
assertThat(params.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
assertThat(params.getSince(), notNullValue());
@@ -308,7 +287,7 @@ public class BulkDataExportProviderTest {
@Test
public void testSuccessfulInitiateBulkRequest_Get_MultipleTypeFilters() throws IOException {
- when(myJobRunner.startNewJob(isNotNull(), any()))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(createJobStartResponse());
String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT
@@ -328,7 +307,7 @@ public class BulkDataExportProviderTest {
assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
- BulkExportParameters params = verifyJobStart();
+ BulkExportJobParameters params = verifyJobStartAndReturnParameters();
assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
assertThat(params.getResourceTypes(), containsInAnyOrder("Patient", "EpisodeOfCare"));
assertThat(params.getSince(), nullValue());
@@ -338,13 +317,16 @@ public class BulkDataExportProviderTest {
@Test
public void testPollForStatus_QUEUED() throws IOException {
// setup
- Batch2JobInfo info = new Batch2JobInfo();
- info.setJobId(A_JOB_ID);
- info.setStatus(BulkExportJobStatusEnum.BUILDING);
+ JobInstance info = new JobInstance();
+ info.setInstanceId(A_JOB_ID);
+ info.setStatus(StatusEnum.QUEUED);
info.setEndTime(new Date());
+ BulkExportJobParameters parameters = new BulkExportJobParameters();
+ info.setParameters(parameters);
+
// when
- when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+ when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
.thenReturn(info);
// test
@@ -364,16 +346,19 @@ public class BulkDataExportProviderTest {
}
@Test
- public void testPollForStatus_ERROR() throws IOException {
+ public void testPollForStatus_Failed() throws IOException {
// setup
- Batch2JobInfo info = new Batch2JobInfo();
- info.setJobId(A_JOB_ID);
- info.setStatus(BulkExportJobStatusEnum.ERROR);
+ JobInstance info = new JobInstance();
+ info.setInstanceId(A_JOB_ID);
+ info.setStatus(StatusEnum.FAILED);
info.setStartTime(new Date());
- info.setErrorMsg("Some Error Message");
+ info.setErrorMessage("Some Error Message");
+
+ BulkExportJobParameters parameters = new BulkExportJobParameters();
+ info.setParameters(parameters);
// when
- when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+ when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
.thenReturn(info);
// call
@@ -410,9 +395,9 @@ public class BulkDataExportProviderTest {
myBaseUriForExport = myServer.getBaseUrl();
}
- Batch2JobInfo info = new Batch2JobInfo();
- info.setJobId(A_JOB_ID);
- info.setStatus(BulkExportJobStatusEnum.COMPLETE);
+ JobInstance info = new JobInstance();
+ info.setInstanceId(A_JOB_ID);
+ info.setStatus(StatusEnum.COMPLETED);
info.setEndTime(InstantType.now().getValue());
ArrayList ids = new ArrayList<>();
ids.add(new IdType("Binary/111").getValueAsString());
@@ -424,12 +409,15 @@ public class BulkDataExportProviderTest {
map.put("Patient", ids);
results.setResourceTypeToBinaryIds(map);
info.setReport(JsonUtil.serialize(results));
+
+ BulkExportJobParameters parameters = new BulkExportJobParameters();
if (partitioningEnabled) {
- info.setRequestPartitionId(myRequestPartitionId);
+ parameters.setPartitionId(myRequestPartitionId);
}
+ info.setParameters(parameters);
// when
- when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+ when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
.thenReturn(info);
// call
@@ -450,7 +438,7 @@ public class BulkDataExportProviderTest {
}
if (partitioningEnabled) {
// If partitioning is enabled, then the URLs in the poll response should also have the partition name.
- myBaseUriForPoll = myBaseUriForPoll + "/"+ myPartitionName;
+ myBaseUriForPoll = myBaseUriForPoll + "/" + myPartitionName;
}
assertEquals(200, response.getStatusLine().getStatusCode());
@@ -476,11 +464,15 @@ public class BulkDataExportProviderTest {
// setup
enablePartitioning();
- Batch2JobInfo info = new Batch2JobInfo();
- info.setJobId(A_JOB_ID);
- info.setStatus(BulkExportJobStatusEnum.COMPLETE);
+ JobInstance info = new JobInstance();
+ info.setInstanceId(A_JOB_ID);
+ info.setStatus(StatusEnum.COMPLETED);
info.setEndTime(InstantType.now().getValue());
- info.setRequestPartitionId(myRequestPartitionId);
+
+ BulkExportJobParameters parameters = new BulkExportJobParameters();
+ parameters.setPartitionId(myRequestPartitionId);
+ info.setParameters(parameters);
+
ArrayList ids = new ArrayList<>();
ids.add(new IdType("Binary/111").getValueAsString());
ids.add(new IdType("Binary/222").getValueAsString());
@@ -493,7 +485,7 @@ public class BulkDataExportProviderTest {
info.setReport(JsonUtil.serialize(results));
// when
- when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+ when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
.thenReturn(info);
// call
@@ -514,10 +506,14 @@ public class BulkDataExportProviderTest {
public void testExportWhenNoResourcesReturned() throws IOException {
// setup
String msg = "Some msg";
- Batch2JobInfo info = new Batch2JobInfo();
- info.setJobId(A_JOB_ID);
- info.setStatus(BulkExportJobStatusEnum.COMPLETE);
+ JobInstance info = new JobInstance();
+ info.setInstanceId(A_JOB_ID);
+ info.setStatus(StatusEnum.COMPLETED);
info.setEndTime(InstantType.now().getValue());
+
+ BulkExportJobParameters parameters = new BulkExportJobParameters();
+ info.setParameters(parameters);
+
ArrayList ids = new ArrayList<>();
BulkExportJobResults results = new BulkExportJobResults();
HashMap> map = new HashMap<>();
@@ -527,7 +523,7 @@ public class BulkDataExportProviderTest {
info.setReport(JsonUtil.serialize(results));
// when
- when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+ when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
.thenReturn(info);
// test
@@ -554,7 +550,7 @@ public class BulkDataExportProviderTest {
// setup
// when
- when(myJobRunner.getJobInfo(anyString()))
+ when(myJobCoordinator.getInstance(anyString()))
.thenThrow(new ResourceNotFoundException("Unknown job: AAA"));
String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
@@ -584,7 +580,7 @@ public class BulkDataExportProviderTest {
@Test
public void testSuccessfulInitiateGroupBulkRequest_Post() throws IOException {
// when
- when(myJobRunner.startNewJob(isNotNull(), any()))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(createJobStartResponse(G_JOB_ID));
InstantType now = InstantType.now();
@@ -612,7 +608,7 @@ public class BulkDataExportProviderTest {
}
// verify
- BulkExportParameters bp = verifyJobStart();
+ BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
assertThat(bp.getResourceTypes(), containsInAnyOrder("Observation", "DiagnosticReport"));
@@ -625,7 +621,7 @@ public class BulkDataExportProviderTest {
@Test
public void testSuccessfulInitiateGroupBulkRequest_Get() throws IOException {
// when
- when(myJobRunner.startNewJob(isNotNull(), any())).thenReturn(createJobStartResponse(G_JOB_ID));
+ when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse(G_JOB_ID));
InstantType now = InstantType.now();
@@ -648,7 +644,7 @@ public class BulkDataExportProviderTest {
assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
- BulkExportParameters bp = verifyJobStart();
+ BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
assertThat(bp.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
assertThat(bp.getSince(), notNullValue());
@@ -657,6 +653,40 @@ public class BulkDataExportProviderTest {
assertThat(bp.isExpandMdm(), is(equalTo(true)));
}
+ @Test
+ public void testSuccessfulInitiateGroupBulkRequest_Get_SomeTypesDisabled() throws IOException {
+ // when
+ when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse(G_JOB_ID));
+
+ InstantType now = InstantType.now();
+
+ String url = myServer.getBaseUrl() + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT
+ + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
+ + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString());
+
+ // call
+ HttpGet get = new HttpGet(url);
+ get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
+ ourLog.info("Request: {}", url);
+ try (CloseableHttpResponse response = myClient.execute(get)) {
+ ourLog.info("Response: {}", response.toString());
+
+ assertEquals(202, response.getStatusLine().getStatusCode());
+ assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ }
+
+ BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
+ assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
+ assertThat(bp.getResourceTypes().toString(), bp.getResourceTypes(), containsInAnyOrder(
+ "DiagnosticReport", "Group", "Observation", "Device", "Patient", "Encounter"
+ ));
+ assertThat(bp.getSince(), notNullValue());
+ assertThat(bp.getFilters(), notNullValue());
+ assertEquals(GROUP_ID, bp.getGroupId());
+ assertThat(bp.isExpandMdm(), is(equalTo(false)));
+ }
+
@Test
public void testInitiateWithGetAndMultipleTypeFilters() throws IOException {
// setup
@@ -687,7 +717,7 @@ public class BulkDataExportProviderTest {
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
try (CloseableHttpResponse ignored = myClient.execute(get)) {
// verify
- BulkExportParameters bp = verifyJobStart();
+ BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
assertThat(bp.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1));
}
}
@@ -714,8 +744,7 @@ public class BulkDataExportProviderTest {
@Test
public void testInitiateGroupExportWithNoResourceTypes() throws IOException {
// when
- when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
- .thenReturn(createJobStartResponse());
+ when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse());
// test
String url = myServer.getBaseUrl() + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT
@@ -727,12 +756,12 @@ public class BulkDataExportProviderTest {
// verify
assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(202)));
- final BulkExportParameters bulkExportParameters = verifyJobStart();
+ final BulkExportJobParameters BulkExportJobParameters = verifyJobStartAndReturnParameters();
assertAll(
- () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Patient")),
- () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Group")),
- () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Device"))
+ () -> assertTrue(BulkExportJobParameters.getResourceTypes().contains("Patient")),
+ () -> assertTrue(BulkExportJobParameters.getResourceTypes().contains("Group")),
+ () -> assertTrue(BulkExportJobParameters.getResourceTypes().contains("Device"))
);
}
}
@@ -740,7 +769,7 @@ public class BulkDataExportProviderTest {
@Test
public void testInitiateWithPostAndMultipleTypeFilters() throws IOException {
// when
- when(myJobRunner.startNewJob(isNotNull(), any())).thenReturn(createJobStartResponse());
+ when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse());
Parameters input = new Parameters();
input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
@@ -763,7 +792,7 @@ public class BulkDataExportProviderTest {
}
// verify
- BulkExportParameters bp = verifyJobStart();
+ BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
assertThat(bp.getResourceTypes(), containsInAnyOrder("Patient"));
assertThat(bp.getFilters(), containsInAnyOrder("Patient?gender=male", "Patient?gender=female"));
@@ -772,7 +801,7 @@ public class BulkDataExportProviderTest {
@Test
public void testInitiateBulkExportOnPatient_noTypeParam_addsTypeBeforeBulkExport() throws IOException {
// when
- when(myJobRunner.startNewJob(isNotNull(), any()))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(createJobStartResponse());
Parameters input = new Parameters();
@@ -790,7 +819,7 @@ public class BulkDataExportProviderTest {
assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
- BulkExportParameters bp = verifyJobStart();
+ BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
assertThat(bp.getResourceTypes(), containsInAnyOrder("Patient"));
}
@@ -798,7 +827,7 @@ public class BulkDataExportProviderTest {
@Test
public void testInitiatePatientExportRequest() throws IOException {
// when
- when(myJobRunner.startNewJob(isNotNull(), any()))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(createJobStartResponse());
InstantType now = InstantType.now();
@@ -823,7 +852,7 @@ public class BulkDataExportProviderTest {
assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
- BulkExportParameters bp = verifyJobStart();
+ BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
assertThat(bp.getResourceTypes(), containsInAnyOrder("Immunization", "Observation"));
assertThat(bp.getSince(), notNullValue());
@@ -837,7 +866,7 @@ public class BulkDataExportProviderTest {
startResponse.setUsesCachedResult(true);
// when
- when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(startResponse);
Parameters input = new Parameters();
@@ -858,8 +887,8 @@ public class BulkDataExportProviderTest {
}
// verify
- BulkExportParameters parameters = verifyJobStart();
- assertThat(parameters.isUseExistingJobsFirst(), is(equalTo(false)));
+ JobInstanceStartRequest parameters = verifyJobStart();
+ assertFalse(parameters.isUseCache());
}
@Test
@@ -871,7 +900,7 @@ public class BulkDataExportProviderTest {
myStorageSettings.setEnableBulkExportJobReuse(false);
// when
- when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(startResponse);
Parameters input = new Parameters();
@@ -891,8 +920,8 @@ public class BulkDataExportProviderTest {
}
// verify
- BulkExportParameters parameters = verifyJobStart();
- assertThat(parameters.isUseExistingJobsFirst(), is(equalTo(false)));
+ JobInstanceStartRequest parameters = verifyJobStart();
+ assertFalse(parameters.isUseCache());
}
@Test
@@ -901,7 +930,7 @@ public class BulkDataExportProviderTest {
Batch2JobStartResponse startResponse = createJobStartResponse();
startResponse.setUsesCachedResult(true);
startResponse.setInstanceId(A_JOB_ID);
- when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(startResponse);
// when
@@ -919,22 +948,26 @@ public class BulkDataExportProviderTest {
@MethodSource("paramsProvider")
public void testDeleteForOperationPollStatus_SUBMITTED_ShouldCancelJobSuccessfully(boolean partitioningEnabled) throws IOException {
// setup
- Batch2JobInfo info = new Batch2JobInfo();
- info.setJobId(A_JOB_ID);
- info.setStatus(BulkExportJobStatusEnum.SUBMITTED);
- info.setEndTime(InstantType.now().getValue());
+
+ BulkExportJobParameters parameters = new BulkExportJobParameters();
if (partitioningEnabled) {
- info.setRequestPartitionId(myRequestPartitionId);
+ parameters.setPartitionId(myRequestPartitionId);
}
- Batch2JobOperationResult result = new Batch2JobOperationResult();
+
+ JobInstance info = new JobInstance();
+ info.setParameters(parameters);
+ info.setInstanceId(A_JOB_ID);
+ info.setStatus(StatusEnum.QUEUED);
+ info.setEndTime(InstantType.now().getValue());
+ JobOperationResultJson result = new JobOperationResultJson();
result.setOperation("Cancel job instance " + A_JOB_ID);
result.setMessage("Job instance <" + A_JOB_ID + "> successfully cancelled.");
result.setSuccess(true);
// when
- when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+ when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
.thenReturn(info);
- when(myJobRunner.cancelInstance(eq(A_JOB_ID)))
+ when(myJobCoordinator.cancelInstance(eq(A_JOB_ID)))
.thenReturn(result);
// call
@@ -955,7 +988,7 @@ public class BulkDataExportProviderTest {
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- verify(myJobRunner, times(1)).cancelInstance(A_JOB_ID);
+ verify(myJobCoordinator, times(1)).cancelInstance(A_JOB_ID);
String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
ourLog.info("Response content: {}", responseContent);
assertThat(responseContent, containsString("successfully cancelled."));
@@ -965,13 +998,16 @@ public class BulkDataExportProviderTest {
@Test
public void testDeleteForOperationPollStatus_COMPLETE_ShouldReturnError() throws IOException {
// setup
- Batch2JobInfo info = new Batch2JobInfo();
- info.setJobId(A_JOB_ID);
- info.setStatus(BulkExportJobStatusEnum.COMPLETE);
+ JobInstance info = new JobInstance();
+ info.setInstanceId(A_JOB_ID);
+ info.setStatus(StatusEnum.COMPLETED);
info.setEndTime(InstantType.now().getValue());
+ BulkExportJobParameters parameters = new BulkExportJobParameters();
+ info.setParameters(parameters);
+
// when
- when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+ when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
.thenReturn(info);
// call
@@ -984,7 +1020,7 @@ public class BulkDataExportProviderTest {
assertEquals(404, response.getStatusLine().getStatusCode());
assertEquals("Not Found", response.getStatusLine().getReasonPhrase());
- verify(myJobRunner, times(1)).cancelInstance(A_JOB_ID);
+ verify(myJobCoordinator, times(1)).cancelInstance(A_JOB_ID);
String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
+ // content would be blank, since the job is cancelled, so no content assertions are made
ourLog.info("Response content: {}", responseContent);
@@ -995,7 +1031,7 @@ public class BulkDataExportProviderTest {
@Test
public void testGetBulkExport_outputFormat_FhirNdJson_inHeader() throws IOException {
// when
- when(myJobRunner.startNewJob(isNotNull(), any()))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(createJobStartResponse());
// call
@@ -1011,14 +1047,14 @@ public class BulkDataExportProviderTest {
assertTrue(IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8).isEmpty());
}
- final BulkExportParameters params = verifyJobStart();
+ final BulkExportJobParameters params = verifyJobStartAndReturnParameters();
assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
}
@Test
public void testGetBulkExport_outputFormat_FhirNdJson_inUrl() throws IOException {
// when
- when(myJobRunner.startNewJob(isNotNull(), any()))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(createJobStartResponse());
// call
@@ -1034,7 +1070,7 @@ public class BulkDataExportProviderTest {
);
}
- final BulkExportParameters params = verifyJobStart();
+ final BulkExportJobParameters params = verifyJobStartAndReturnParameters();
assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
}
@@ -1042,6 +1078,8 @@ public class BulkDataExportProviderTest {
public void testOperationExportPollStatus_POST_NonExistingId_NotFound() throws IOException {
String jobId = "NonExisting-JobId";
+ when(myJobCoordinator.getInstance(any())).thenThrow(new ResourceNotFoundException("Unknown"));
+
// Create the initial launch Parameters containing the request
Parameters input = new Parameters();
input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(ca.uhn.fhir.rest.api.Constants.CT_FHIR_NDJSON));
@@ -1063,16 +1101,19 @@ public class BulkDataExportProviderTest {
@MethodSource("paramsProvider")
public void testOperationExportPollStatus_POST_ExistingId_Accepted(boolean partititioningEnabled) throws IOException {
// setup
- Batch2JobInfo info = new Batch2JobInfo();
- info.setJobId(A_JOB_ID);
- info.setStatus(BulkExportJobStatusEnum.SUBMITTED);
+ JobInstance info = new JobInstance();
+ info.setInstanceId(A_JOB_ID);
+ info.setStatus(StatusEnum.QUEUED);
info.setEndTime(InstantType.now().getValue());
- if(partititioningEnabled) {
- info.setRequestPartitionId(myRequestPartitionId);
+
+ BulkExportJobParameters parameters = new BulkExportJobParameters();
+ if (partititioningEnabled) {
+ parameters.setPartitionId(myRequestPartitionId);
}
+ info.setParameters(parameters);
// when
- when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+ when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
.thenReturn(info);
// Create the initial launch Parameters containing the request
@@ -1159,14 +1200,17 @@ public class BulkDataExportProviderTest {
// setup
enablePartitioning();
- Batch2JobInfo info = new Batch2JobInfo();
- info.setJobId(A_JOB_ID);
- info.setStatus(BulkExportJobStatusEnum.BUILDING);
+ JobInstance info = new JobInstance();
+ info.setInstanceId(A_JOB_ID);
+ info.setStatus(StatusEnum.IN_PROGRESS);
info.setEndTime(new Date());
- info.setRequestPartitionId(myRequestPartitionId);
+
+ BulkExportJobParameters parameters = new BulkExportJobParameters();
+ parameters.setPartitionId(myRequestPartitionId);
+ info.setParameters(parameters);
// when
- when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+ when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
.thenReturn(info);
// test
@@ -1189,5 +1233,25 @@ public class BulkDataExportProviderTest {
);
}
+ private class MyRequestPartitionHelperSvc extends RequestPartitionHelperSvc {
+ @Override
+ public @NotNull RequestPartitionId determineReadPartitionForRequest(RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails) {
+ assert theRequest != null;
+ if (myPartitionName.equals(theRequest.getTenantId())) {
+ return myRequestPartitionId;
+ } else {
+ return RequestPartitionId.fromPartitionName(theRequest.getTenantId());
+ }
+ }
+
+ @Override
+ public void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) {
+ if (!myPartitionName.equals(theRequest.getTenantId()) && theRequest.getTenantId() != null) {
+ throw new ForbiddenOperationException("User does not have access to resources on the requested partition");
+ }
+ }
+
+ }
+
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java
index 5ce7a4f7840..fe6567acc3a 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java
@@ -1,22 +1,44 @@
package ca.uhn.fhir.jpa.bulk;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
+import ca.uhn.fhir.batch2.model.StatusEnum;
+import ca.uhn.fhir.interceptor.api.Hook;
+import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
-import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.JsonUtil;
import com.google.common.collect.Sets;
import org.apache.commons.io.LineIterator;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
-import org.hl7.fhir.r4.model.*;
+import org.hl7.fhir.r4.model.Basic;
+import org.hl7.fhir.r4.model.Binary;
+import org.hl7.fhir.r4.model.CarePlan;
+import org.hl7.fhir.r4.model.Device;
+import org.hl7.fhir.r4.model.DocumentReference;
+import org.hl7.fhir.r4.model.Encounter;
+import org.hl7.fhir.r4.model.Enumerations;
+import org.hl7.fhir.r4.model.Group;
+import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.InstantType;
+import org.hl7.fhir.r4.model.Location;
+import org.hl7.fhir.r4.model.MedicationAdministration;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Organization;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Practitioner;
+import org.hl7.fhir.r4.model.Provenance;
+import org.hl7.fhir.r4.model.QuestionnaireResponse;
+import org.hl7.fhir.r4.model.Reference;
+import org.hl7.fhir.r4.model.ServiceRequest;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.MethodOrderer;
@@ -58,7 +80,7 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportTest.class);
@Autowired
- private IBatch2JobRunner myJobRunner;
+ private IJobCoordinator myJobCoordinator;
@AfterEach
void afterEach() {
@@ -94,11 +116,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
myClient.update().resource(group).execute();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient"));
- options.setGroupId(new IdType("Group", "G"));
+ options.setGroupId("Group/G");
options.setFilters(Sets.newHashSet("Patient?gender=female"));
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, Collections.singletonList("Patient/PF"), Collections.singletonList("Patient/PM"));
}
@@ -131,11 +153,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
myClient.update().resource(group).execute();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient"));
- options.setGroupId(new IdType("Group", "G"));
+ options.setGroupId("Group/G");
options.setFilters(Sets.newHashSet("Patient?_security=http://security|val1"));
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, Collections.singletonList("Patient/PM"), Collections.singletonList("Patient/PF"));
}
@@ -170,11 +192,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
myClient.update().resource(group).execute();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient"));
- options.setGroupId(new IdType("Group", "G2"));
+ options.setGroupId("Group/G2");
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, List.of("Patient/PING1", "Patient/PING2"), Collections.singletonList("Patient/PNING3"));
}
@@ -195,11 +217,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
myClient.update().resource(group).execute();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient"));
- options.setGroupId(new IdType("Group", "G2"));
+ options.setGroupId("Group/G2");
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
myCaptureQueriesListener.clear();
@@ -255,11 +277,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
String obsId3 = myClient.create().resource(observation).execute().getId().toUnqualifiedVersionless().getValue();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Encounter"));
- options.setPatientIds(Sets.newHashSet(new IdType("Patient", "P1")));
+ options.setPatientIds(Set.of("Patient/P1"));
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, List.of("Patient/P1", obsId, encId), List.of("Patient/P2", obsId2, encId2, obsId3));
@@ -317,11 +339,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
String encId3 = myClient.create().resource(encounter).execute().getId().toUnqualifiedVersionless().getValue();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Encounter"));
- options.setPatientIds(Sets.newHashSet(new IdType("Patient", "P1"), new IdType("Patient", "P2")));
+ options.setPatientIds(Set.of("Patient/P1", "Patient/P2"));
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, List.of("Patient/P1", obsId, encId, "Patient/P2", obsId2, encId2), List.of("Patient/P3", obsId3, encId3));
@@ -399,11 +421,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
myClient.update().resource(group).execute();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient", "Encounter"));
- options.setGroupId(new IdType("Group", "G1"));
+ options.setGroupId("Group/G1");
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, List.of("Patient/P1", practId, orgId, encId, encId2, locId), List.of("Patient/P2", orgId2, encId3, locId2));
}
@@ -440,11 +462,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
myClient.update().resource(group).execute();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient", "Encounter", "Observation"));
- options.setGroupId(new IdType("Group", "G1"));
+ options.setGroupId("Group/G1");
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, List.of("Patient/P1", practId, encId, obsId), Collections.emptyList());
}
@@ -497,11 +519,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
myClient.update().resource(group).execute();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Provenance"));
- options.setGroupId(new IdType("Group", "G1"));
+ options.setGroupId("Group/G1");
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, List.of("Patient/P1", obsId, provId, devId, devId2), List.of("Patient/P2", provId2, devId3));
}
@@ -535,11 +557,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
myClient.update().resource(observation).execute();
// Set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Group"));
- options.setGroupId(new IdType("Group", "B"));
+ options.setGroupId("Group/B");
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setSince(timeDate);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
// Should get the sub-resource (Observation) even the patient hasn't been updated after the _since param
@@ -575,9 +597,9 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
String questRespAuthId = myClient.create().resource(questionnaireResponseAuth).execute().getId().toUnqualifiedVersionless().getValue();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient", "Basic", "DocumentReference", "QuestionnaireResponse"));
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, List.of("Patient/P1", basicId, docRefId, questRespAuthId, questRespSubId), Collections.emptyList());
}
@@ -623,9 +645,9 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
String obsPerId = myClient.create().resource(observationPer).execute().getId().toUnqualifiedVersionless().getValue();
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "CarePlan", "MedicationAdministration", "ServiceRequest"));
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, List.of("Patient/P1", carePlanId, medAdminId, sevReqId, obsSubId, obsPerId), Collections.emptyList());
}
@@ -646,9 +668,9 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
// set the export options
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient", "Device"));
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, List.of("Patient/P1", deviceId), Collections.emptyList());
}
@@ -675,11 +697,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
device.setPatient(patientReference);
final IIdType deviceIdType = myClient.create().resource(device).execute().getId().toUnqualifiedVersionless();
- final BulkDataExportOptions options = new BulkDataExportOptions();
+ final BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(resourceTypesForExport);
- options.setGroupId(new IdType("Group", "B"));
+ options.setGroupId("Group/B");
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
final List expectedContainedIds;
@@ -700,18 +722,57 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
expectedIds.add(createObservation(withStatus("final")).getValue());
}
- final BulkDataExportOptions options = new BulkDataExportOptions();
+ final BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Set.of("Patient", "Observation"));
options.setFilters(Set.of("Patient?active=true", "Patient?active=false", "Observation?status=final"));
- options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
JobInstance finalJobInstance = verifyBulkExportResults(options, expectedIds, List.of());
assertEquals(40, finalJobInstance.getCombinedRecordsProcessed());
}
- private JobInstance verifyBulkExportResults(BulkDataExportOptions theOptions, List theContainedList, List theExcludedList) {
- Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions));
+ @Test
+ public void testSystemBulkExport_WithBulkExportInclusionInterceptor() {
+
+ class BoysOnlyInterceptor {
+
+ @Hook(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)
+ public boolean include(IBaseResource theResource) {
+ if (((Patient)theResource).getGender() == Enumerations.AdministrativeGender.FEMALE) {
+ return false;
+ }
+ return true;
+ }
+
+ }
+ myInterceptorRegistry.registerInterceptor(new BoysOnlyInterceptor());
+ try {
+
+ List expectedIds = new ArrayList<>();
+ for (int i = 0; i < 10; i++) {
+ expectedIds.add(createPatient(withActiveTrue(), withGender("male")).getValue());
+ }
+ for (int i = 0; i < 10; i++) {
+ createPatient(withActiveTrue(), withGender("female"));
+ }
+
+ final BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setResourceTypes(Set.of("Patient", "Observation"));
+ options.setFilters(Set.of("Patient?active=true", "Patient?active=false", "Observation?status=final"));
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
+ options.setOutputFormat(Constants.CT_FHIR_NDJSON);
+
+ JobInstance finalJobInstance = verifyBulkExportResults(options, expectedIds, List.of());
+ assertEquals(10, finalJobInstance.getCombinedRecordsProcessed());
+
+ } finally {
+ myInterceptorRegistry.unregisterInterceptorsIf(t->t instanceof BoysOnlyInterceptor);
+ }
+ }
+
+ private JobInstance verifyBulkExportResults(BulkExportJobParameters theOptions, List theContainedList, List theExcludedList) {
+ Batch2JobStartResponse startResponse = startNewJob(theOptions);
assertNotNull(startResponse);
assertFalse(startResponse.isUsesCachedResult());
@@ -721,14 +782,14 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
await()
.atMost(200, TimeUnit.SECONDS)
- .until(() -> myJobRunner.getJobInfo(startResponse.getInstanceId()).getStatus() == BulkExportJobStatusEnum.COMPLETE);
+ .until(() -> myJobCoordinator.getInstance(startResponse.getInstanceId()).getStatus() == StatusEnum.COMPLETED);
await()
.atMost(200, TimeUnit.SECONDS)
- .until(() -> myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport() != null);
+ .until(() -> myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport() != null);
// Iterate over the files
- String report = myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport();
+ String report = myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport();
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
Set foundIds = new HashSet<>();
@@ -777,11 +838,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
@Test
public void testValidateParameters_InvalidPostFetch_NoParams() {
// Setup
- final BulkDataExportOptions options = createOptionsWithPostFetchFilterUrl("foo");
+ final BulkExportJobParameters options = createOptionsWithPostFetchFilterUrl("foo");
// Test
try {
- myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+ startNewJob(options);
fail();
} catch (InvalidRequestException e) {
@@ -796,11 +857,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
@Test
public void testValidateParameters_InvalidPostFetch_NoParamsAfterQuestionMark() {
// Setup
- final BulkDataExportOptions options = createOptionsWithPostFetchFilterUrl("Patient?");
+ final BulkExportJobParameters options = createOptionsWithPostFetchFilterUrl("Patient?");
// Test
try {
- myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+ startNewJob(options);
fail();
} catch (InvalidRequestException e) {
@@ -815,11 +876,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
@Test
public void testValidateParameters_InvalidPostFetch_InvalidResourceType() {
// Setup
- final BulkDataExportOptions options = createOptionsWithPostFetchFilterUrl("Foo?active=true");
+ final BulkExportJobParameters options = createOptionsWithPostFetchFilterUrl("Foo?active=true");
// Test
try {
- myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+ startNewJob(options);
fail();
} catch (InvalidRequestException e) {
@@ -834,11 +895,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
@Test
public void testValidateParameters_InvalidPostFetch_UnsupportedParam() {
// Setup
- final BulkDataExportOptions options = createOptionsWithPostFetchFilterUrl("Observation?subject.identifier=blah");
+ final BulkExportJobParameters options = createOptionsWithPostFetchFilterUrl("Observation?subject.identifier=blah");
// Test
try {
- myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+ startNewJob(options);
fail();
} catch (InvalidRequestException e) {
@@ -853,11 +914,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
@Test
public void testValidateParameters_InvalidPostFetch_UnknownParam() {
// Setup
- final BulkDataExportOptions options = createOptionsWithPostFetchFilterUrl("Observation?foo=blah");
+ final BulkExportJobParameters options = createOptionsWithPostFetchFilterUrl("Observation?foo=blah");
// Test
try {
- myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+ startNewJob(options);
fail();
} catch (InvalidRequestException e) {
@@ -868,12 +929,20 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
}
}
+ private Batch2JobStartResponse startNewJob(BulkExportJobParameters theParameters) {
+ JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+ startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
+ startRequest.setUseCache(false);
+ startRequest.setParameters(theParameters);
+ return myJobCoordinator.startInstance(mySrd, startRequest);
+ }
+
@Nonnull
- private static BulkDataExportOptions createOptionsWithPostFetchFilterUrl(String postFetchUrl) {
- final BulkDataExportOptions options = new BulkDataExportOptions();
+ private static BulkExportJobParameters createOptionsWithPostFetchFilterUrl(String postFetchUrl) {
+ final BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Set.of("Patient"));
options.setFilters(new HashSet<>());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
options.setPostFetchFilterUrls(Set.of(postFetchUrl));
return options;
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java
index 704a5c4ba3c..5329366c9f9 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java
@@ -1,13 +1,14 @@
package ca.uhn.fhir.jpa.bulk;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.IJobMaintenanceService;
import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
+import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
@@ -16,13 +17,13 @@ import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
-import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
+import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.UrlUtil;
@@ -44,7 +45,6 @@ import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.Group;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.InstantType;
-import org.hl7.fhir.r4.model.Meta;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Parameters;
@@ -96,7 +96,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(BulkExportUseCaseTest.class);
@Autowired
- private IBatch2JobRunner myJobRunner;
+ private IJobCoordinator myJobCoordinator;
@Autowired
private IJobPersistence myJobPersistence;
@@ -439,15 +439,18 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
myPatientDao.update(patient);
}
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Collections.singleton("Patient"));
options.setFilters(Collections.emptySet());
- options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
myCaptureQueriesListener.clear();
- Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+ JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+ startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
+ startRequest.setParameters(options);
+ Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
assertNotNull(startResponse);
@@ -562,25 +565,28 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
int patientsCreated = myPatientDao.search(SearchParameterMap.newSynchronous(), details).size();
assertEquals(numPatients, patientsCreated);
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient"));
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
- Batch2JobStartResponse job = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+ JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+ startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
+ startRequest.setParameters(options);
+ Batch2JobStartResponse job = myJobCoordinator.startInstance(mySrd, startRequest);
myBatch2JobHelper.awaitJobCompletion(job.getInstanceId(), 60);
- ourLog.debug("Job status after awaiting - {}", myJobRunner.getJobInfo(job.getInstanceId()).getStatus());
+ ourLog.debug("Job status after awaiting - {}", myJobCoordinator.getInstance(job.getInstanceId()).getStatus());
await()
.atMost(300, TimeUnit.SECONDS)
.until(() -> {
- BulkExportJobStatusEnum status = myJobRunner.getJobInfo(job.getInstanceId()).getStatus();
- if (!BulkExportJobStatusEnum.COMPLETE.equals(status)) {
+ StatusEnum status = myJobCoordinator.getInstance(job.getInstanceId()).getStatus();
+ if (!StatusEnum.COMPLETED.equals(status)) {
fail("Job status was changed from COMPLETE to " + status);
}
- return myJobRunner.getJobInfo(job.getInstanceId()).getReport() != null;
+ return myJobCoordinator.getInstance(job.getInstanceId()).getReport() != null;
});
- String report = myJobRunner.getJobInfo(job.getInstanceId()).getReport();
+ String report = myJobCoordinator.getInstance(job.getInstanceId()).getReport();
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
List binaryUrls = results.getResourceTypeToBinaryIds().get("Patient");
@@ -1364,15 +1370,15 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
}
BulkExportJobResults startGroupBulkExportJobAndAwaitCompletion(HashSet theResourceTypes, HashSet theFilters, String theGroupId) {
- return startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle.GROUP, theResourceTypes, theFilters, theGroupId);
+ return startBulkExportJobAndAwaitCompletion(BulkExportJobParameters.ExportStyle.GROUP, theResourceTypes, theFilters, theGroupId);
}
BulkExportJobResults startSystemBulkExportJobAndAwaitCompletion(Set theResourceTypes, Set theFilters) {
- return startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle.SYSTEM, theResourceTypes, theFilters, null);
+ return startBulkExportJobAndAwaitCompletion(BulkExportJobParameters.ExportStyle.SYSTEM, theResourceTypes, theFilters, null);
}
BulkExportJobResults startBulkExportJobAndAwaitCompletion(
- BulkDataExportOptions.ExportStyle theExportStyle,
+ BulkExportJobParameters.ExportStyle theExportStyle,
Set theResourceTypes,
Set theFilters,
String theGroupOrPatientId
@@ -1398,7 +1404,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
MethodOutcome outcome;
- if (theExportStyle == BulkDataExportOptions.ExportStyle.GROUP) {
+ if (theExportStyle == BulkExportJobParameters.ExportStyle.GROUP) {
outcome = myClient
.operation()
@@ -1408,7 +1414,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
.returnMethodOutcome()
.withAdditionalHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC)
.execute();
- } else if (theExportStyle == BulkDataExportOptions.ExportStyle.PATIENT && theGroupOrPatientId != null) {
+ } else if (theExportStyle == BulkExportJobParameters.ExportStyle.PATIENT && theGroupOrPatientId != null) {
//TODO add support for this actual processor.
fail("Bulk Exports that return no data do not return");
outcome = myClient
@@ -1453,32 +1459,35 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
myBatch2JobHelper.awaitJobCompletion(jobInstanceId, 60);
- await().atMost(300, TimeUnit.SECONDS).until(() -> myJobRunner.getJobInfo(jobInstanceId).getReport() != null);
+ await().atMost(300, TimeUnit.SECONDS).until(() -> myJobCoordinator.getInstance(jobInstanceId).getReport() != null);
- String report = myJobRunner.getJobInfo(jobInstanceId).getReport();
+ String report = myJobCoordinator.getInstance(jobInstanceId).getReport();
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
return results;
}
private void verifyBulkExportResults(String theGroupId, HashSet theFilters, List theContainedList, List theExcludedList) {
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient"));
- options.setGroupId(new IdType("Group", theGroupId));
+ options.setGroupId("Group/" + theGroupId);
options.setFilters(theFilters);
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
- Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+ JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+ startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
+ startRequest.setParameters(options);
+ Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
assertNotNull(startResponse);
// Run a scheduled pass to build the export
myBatch2JobHelper.awaitJobCompletion(startResponse.getInstanceId());
- await().until(() -> myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport() != null);
+ await().until(() -> myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport() != null);
// Iterate over the files
- String report = myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport();
+ String report = myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport();
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
for (Map.Entry> file : results.getResourceTypeToBinaryIds().entrySet()) {
List binaryIds = file.getValue();
@@ -1500,6 +1509,6 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
}
private BulkExportJobResults startPatientBulkExportJobAndAwaitResults(HashSet theTypes, HashSet theFilters, String thePatientId) {
- return startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle.PATIENT, theTypes, theFilters, thePatientId);
+ return startBulkExportJobAndAwaitCompletion(BulkExportJobParameters.ExportStyle.PATIENT, theTypes, theFilters, thePatientId);
}
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesStepJpaTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesStepJpaTest.java
index 72d7a6caf58..2b4257efe84 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesStepJpaTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesStepJpaTest.java
@@ -3,14 +3,13 @@ package ca.uhn.fhir.jpa.bulk.export;
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.ExpandResourcesStep;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.jobs.models.BatchResourceId;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
-import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/FetchResourceIdsStepJpaTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/FetchResourceIdsStepJpaTest.java
index f90a46ea3b7..e6e6fd08108 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/FetchResourceIdsStepJpaTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/FetchResourceIdsStepJpaTest.java
@@ -4,16 +4,13 @@ import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.jobs.export.FetchResourceIdsStep;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TagsTest;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
-import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import org.hl7.fhir.r4.model.DateTimeType;
-import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.OrganizationAffiliation;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
@@ -25,7 +22,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
-import java.util.HashSet;
import java.util.List;
public class FetchResourceIdsStepJpaTest extends BaseJpaR4Test {
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/ResponseTerminologyTranslationInterceptorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/ResponseTerminologyTranslationInterceptorTest.java
index d5039002cbd..f9967311119 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/ResponseTerminologyTranslationInterceptorTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/ResponseTerminologyTranslationInterceptorTest.java
@@ -1,14 +1,15 @@
package ca.uhn.fhir.jpa.interceptor;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
-import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
+import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.JsonUtil;
import com.google.common.collect.Sets;
import org.hamcrest.Matchers;
@@ -43,7 +44,7 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
private ResponseTerminologyTranslationInterceptor myResponseTerminologyTranslationInterceptor;
@Autowired
- private IBatch2JobRunner myJobRunner;
+ private IJobCoordinator myJobCoordinator;
@BeforeEach
public void beforeEach() {
@@ -198,23 +199,26 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
private void createBulkJobAndCheckCodingList(List codingList) {
// Create a bulk job
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Observation"));
options.setFilters(Sets.newHashSet(TEST_OBV_FILTER));
- options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
- Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
+ JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+ startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
+ startRequest.setParameters(options);
+ Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
assertNotNull(startResponse);
// Run a scheduled pass to build the export
myBatch2JobHelper.awaitJobCompletion(startResponse.getInstanceId());
- await().until(() -> myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport() != null);
+ await().until(() -> myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport() != null);
// Iterate over the files
- String report = myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport();
+ String report = myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport();
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
for (Map.Entry> file : results.getResourceTypeToBinaryIds().entrySet()) {
String resourceTypeInFile = file.getKey();
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/AuthorizationInterceptorJpaR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/AuthorizationInterceptorJpaR4Test.java
index db4648b33a9..9001bbcd925 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/AuthorizationInterceptorJpaR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/AuthorizationInterceptorJpaR4Test.java
@@ -1,7 +1,7 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.i18n.Msg;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantServerR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantServerR4Test.java
index 11d10b5b8f1..64d0faeba73 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantServerR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantServerR4Test.java
@@ -1,15 +1,15 @@
package ca.uhn.fhir.jpa.provider.r4;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -18,7 +18,7 @@ import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
@@ -621,7 +621,7 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te
private BulkDataExportProvider myProvider;
@Mock
- private IBatch2JobRunner myJobRunner;
+ private IJobCoordinator myJobCoordinator;
@Spy
private RequestPartitionHelperSvc myRequestPartitionHelperSvc = new MultitenantServerR4Test.PartitionTesting.MyRequestPartitionHelperSvc();
@@ -661,21 +661,22 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te
map.put("Patient", Arrays.asList("Binary/1", "Binary/2"));
results.setResourceTypeToBinaryIds(map);
- Batch2JobInfo jobInfo = new Batch2JobInfo();
- jobInfo.setJobId(jobId);
- jobInfo.setStatus(BulkExportJobStatusEnum.COMPLETE);
+ JobInstance jobInfo = new JobInstance();
+ jobInfo.setInstanceId(jobId);
+ jobInfo.setStatus(StatusEnum.COMPLETED);
jobInfo.setReport(JsonUtil.serialize(results));
+ jobInfo.setParameters(new BulkExportJobParameters());
// Create a bulk job
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient"));
- options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
Batch2JobStartResponse startResponse = new Batch2JobStartResponse();
startResponse.setInstanceId(jobId);
- when(myJobRunner.startNewJob(isNotNull(), any()))
+ when(myJobCoordinator.startInstance(isNotNull(), any()))
.thenReturn(startResponse);
- when(myJobRunner.getJobInfo(anyString()))
+ when(myJobCoordinator.getInstance(anyString()))
.thenReturn(jobInfo);
// mocking
diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml
index 34c78eff8fa..8714277b6dc 100644
--- a/hapi-fhir-jpaserver-test-r4b/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4b/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml
index 024d1e78c78..2479bbd930b 100644
--- a/hapi-fhir-jpaserver-test-r5/pom.xml
+++ b/hapi-fhir-jpaserver-test-r5/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/BaseResourceProviderR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/BaseResourceProviderR5Test.java
index 6717e74faa0..beceaadf5c3 100644
--- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/BaseResourceProviderR5Test.java
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/BaseResourceProviderR5Test.java
@@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.provider.r5;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeProvider;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.jpa.dao.r5.BaseJpaR5Test;
import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
import ca.uhn.fhir.jpa.provider.DiffProvider;
diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml
index 1eac5604c1b..8a2ee1b70af 100644
--- a/hapi-fhir-jpaserver-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java
index 923b7e6b238..67e5a243ded 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.provider;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeProvider;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.jpa.dao.data.IPartitionDao;
import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/MockHapiTransactionService.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/MockHapiTransactionService.java
index fe17dee5879..d3780313b2f 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/MockHapiTransactionService.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/MockHapiTransactionService.java
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
package ca.uhn.fhir.jpa.search;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
index 09b9d81d833..30baac3eaf7 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
@@ -37,7 +37,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkJpaRepository;
diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
index 4ddebadc16a..84a6d6dec11 100644
--- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
+++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java
index 2c01e75503b..60127c26465 100644
--- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java
+++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java
@@ -8,7 +8,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml
index ae9f6657fb1..4c49668e12e 100644
--- a/hapi-fhir-server-mdm/pom.xml
+++ b/hapi-fhir-server-mdm/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml
index b0a6e4c11dd..0dcc05a3523 100644
--- a/hapi-fhir-server-openapi/pom.xml
+++ b/hapi-fhir-server-openapi/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml
index 9cda64eaffc..464786fa00b 100644
--- a/hapi-fhir-server/pom.xml
+++ b/hapi-fhir-server/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkDataExportOptions.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkDataExportOptions.java
deleted file mode 100644
index eb609f07d02..00000000000
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkDataExportOptions.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR - Server Framework
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.rest.api.server.bulk;
-
-import org.hl7.fhir.instance.model.api.IIdType;
-
-import javax.annotation.Nonnull;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.Set;
-
-// TODO: JA in next ticket - We have 3 more or less identical classes representing
-// bulk data job parameters: BulkDataExportOptions, BulkExportJobParameters, and BulkExportParameters
-// They don't seem to serve any distinct purpose so they should be collapsed into 1
-public class BulkDataExportOptions {
-
-
- public enum ExportStyle {
- PATIENT,
- GROUP,
- SYSTEM
- }
-
- private String myOutputFormat;
- private Set myResourceTypes;
- private Date mySince;
- private Set myFilters;
- private Set myPostFetchFilterUrls;
- private ExportStyle myExportStyle;
- private boolean myExpandMdm;
- private IIdType myGroupId;
- private Set myPatientIds;
-
- private String myExportIdentifier;
-
- public void setOutputFormat(String theOutputFormat) {
- myOutputFormat = theOutputFormat;
- }
-
- public void setResourceTypes(Set theResourceTypes) {
- myResourceTypes = theResourceTypes;
- }
-
- public void setSince(Date theSince) {
- mySince = theSince;
- }
-
- public void setFilters(Set theFilters) {
- myFilters = theFilters;
- }
-
- public ExportStyle getExportStyle() {
- return myExportStyle;
- }
-
- public void setExportStyle(ExportStyle theExportStyle) {
- myExportStyle = theExportStyle;
- }
-
- public String getOutputFormat() {
- return myOutputFormat;
- }
-
- @Nonnull
- public Set getResourceTypes() {
- if (myResourceTypes == null) {
- myResourceTypes = Set.of();
- }
- return myResourceTypes;
- }
-
- public Date getSince() {
- return mySince;
- }
-
- @Nonnull
- public Set getFilters() {
- if (myFilters == null) {
- myFilters = Set.of();
- }
- return myFilters;
- }
-
- @Nonnull
- public Set getPostFetchFilterUrls() {
- if (myPostFetchFilterUrls == null) {
- myPostFetchFilterUrls = Set.of();
- }
- return myPostFetchFilterUrls;
- }
-
- public void setPostFetchFilterUrls(Set thePostFetchFilterUrls) {
- myPostFetchFilterUrls = thePostFetchFilterUrls;
- }
-
- public boolean isExpandMdm() {
- return myExpandMdm;
- }
-
- public void setExpandMdm(boolean theExpandMdm) {
- myExpandMdm = theExpandMdm;
- }
-
- public IIdType getGroupId() {
- return myGroupId;
- }
-
- public void setGroupId(IIdType theGroupId) {
- myGroupId = theGroupId;
- }
-
- public Set getPatientIds() {
- return myPatientIds;
- }
-
- public void setPatientIds(Set thePatientIds) {
- myPatientIds = thePatientIds;
- }
-
- public String getExportIdentifier() {
- return myExportIdentifier;
- }
-
- public void setExportIdentifier(String theExportIdentifier) {
- myExportIdentifier = theExportIdentifier;
- }
-}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobParameters.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkExportJobParameters.java
similarity index 63%
rename from hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobParameters.java
rename to hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkExportJobParameters.java
index 379b53673af..40740e05876 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobParameters.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkExportJobParameters.java
@@ -1,6 +1,6 @@
/*-
* #%L
- * hapi-fhir-storage-batch2-jobs
+ * HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
@@ -17,24 +17,26 @@
* limitations under the License.
* #L%
*/
-package ca.uhn.fhir.batch2.jobs.export.models;
+package ca.uhn.fhir.rest.api.server.bulk;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
-import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
-import ca.uhn.fhir.jpa.util.JsonDateSerializer;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
+import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
+import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Date;
import java.util.List;
-public class BulkExportJobParameters extends BulkExportJobBase {
+public class BulkExportJobParameters implements IModelJson {
- // list of resource types to export
+ /**
+ * List of resource types to export.
+ */
@JsonProperty("resourceTypes")
private List myResourceTypes;
@@ -50,22 +52,43 @@ public class BulkExportJobParameters extends BulkExportJobBase {
@JsonProperty("exportId")
private String myExportId;
+ /**
+ * Filters are used to narrow down the resources to export.
+ * Eg:
+ * Patient/123?group=a
+ * "group=a" is a filter
+ */
@JsonProperty("filters")
private List myFilters;
+ /**
+ * URLs to be applied by the inMemoryMatcher after the SQL select
+ */
@JsonProperty("postFetchFilterUrls")
private List myPostFetchFilterUrls;
+ /**
+ * Output format.
+ * Currently unsupported (all outputs are ndjson)
+ */
@JsonProperty("outputFormat")
private String myOutputFormat;
- // TODO - move enum
+ /**
+ * Export style - Patient, Group or System
+ */
@JsonProperty("exportStyle")
- private BulkDataExportOptions.ExportStyle myExportStyle;
+ private ExportStyle myExportStyle;
+ /**
+ * Patient id(s)
+ */
@JsonProperty("patientIds")
private List myPatientIds;
+ /**
+ * The URL of the request that originated this export.
+ */
@JsonProperty("originalRequestUrl")
private String myOriginalRequestUrl;
@@ -75,33 +98,27 @@ public class BulkExportJobParameters extends BulkExportJobBase {
@JsonProperty("groupId")
private String myGroupId;
+ /**
+ * For group export only: whether or not to expand MDM
+ */
@JsonProperty("expandMdm")
private boolean myExpandMdm;
+ /**
+ * The partition for the request if applicable.
+ */
@JsonProperty("partitionId")
private RequestPartitionId myPartitionId;
- public static BulkExportJobParameters createFromExportJobParameters(BulkExportParameters theParameters) {
- BulkExportJobParameters params = new BulkExportJobParameters();
- params.setResourceTypes(theParameters.getResourceTypes());
- params.setExportStyle(theParameters.getExportStyle());
- params.setExportIdentifier(theParameters.getExportIdentifier());
- params.setFilters(theParameters.getFilters());
- params.setPostFetchFilterUrls(theParameters.getPostFetchFilterUrls());
- params.setGroupId(theParameters.getGroupId());
- params.setOutputFormat(theParameters.getOutputFormat());
- params.setSince(theParameters.getSince());
- params.setExpandMdm(theParameters.isExpandMdm());
- params.setPatientIds(theParameters.getPatientIds());
- params.setOriginalRequestUrl(theParameters.getOriginalRequestUrl());
- params.setPartitionId(theParameters.getPartitionId());
- return params;
- }
-
public String getExportIdentifier() {
return myExportId;
}
+ public void setExportIdentifier(String theExportId) {
+ myExportId = theExportId;
+ }
+
public List getResourceTypes() {
if (myResourceTypes == null) {
myResourceTypes = new ArrayList<>();
@@ -109,12 +126,11 @@ public class BulkExportJobParameters extends BulkExportJobBase {
return myResourceTypes;
}
- public void setExportIdentifier(String theExportId) {
- myExportId = theExportId;
- }
-
- public void setResourceTypes(List theResourceTypes) {
- myResourceTypes = theResourceTypes;
+ public void setResourceTypes(Collection theResourceTypes) {
+ getResourceTypes().clear();
+ if (theResourceTypes != null) {
+ getResourceTypes().addAll(theResourceTypes);
+ }
}
public Date getSince() {
@@ -126,11 +142,17 @@ public class BulkExportJobParameters extends BulkExportJobBase {
}
public List getFilters() {
+ if (myFilters == null) {
+ myFilters = new ArrayList<>();
+ }
return myFilters;
}
- public void setFilters(List theFilters) {
- myFilters = theFilters;
+ public void setFilters(Collection theFilters) {
+ getFilters().clear();
+ if (theFilters != null) {
+ getFilters().addAll(theFilters);
+ }
}
public List getPostFetchFilterUrls() {
@@ -140,8 +162,11 @@ public class BulkExportJobParameters extends BulkExportJobBase {
return myPostFetchFilterUrls;
}
- public void setPostFetchFilterUrls(List thePostFetchFilterUrls) {
- myPostFetchFilterUrls = thePostFetchFilterUrls;
+ public void setPostFetchFilterUrls(Collection thePostFetchFilterUrls) {
+ getPostFetchFilterUrls().clear();
+ if (thePostFetchFilterUrls != null) {
+ getPostFetchFilterUrls().addAll(thePostFetchFilterUrls);
+ }
}
public String getOutputFormat() {
@@ -152,20 +177,26 @@ public class BulkExportJobParameters extends BulkExportJobBase {
myOutputFormat = theOutputFormat;
}
- public BulkDataExportOptions.ExportStyle getExportStyle() {
+ public ExportStyle getExportStyle() {
return myExportStyle;
}
- public void setExportStyle(BulkDataExportOptions.ExportStyle theExportStyle) {
+ public void setExportStyle(ExportStyle theExportStyle) {
myExportStyle = theExportStyle;
}
public List getPatientIds() {
+ if (myPatientIds == null) {
+ myPatientIds = new ArrayList<>();
+ }
return myPatientIds;
}
- public void setPatientIds(List thePatientIds) {
- myPatientIds = thePatientIds;
+ public void setPatientIds(Collection thePatientIds) {
+ getPatientIds().clear();
+ if (thePatientIds != null) {
+ getPatientIds().addAll(thePatientIds);
+ }
}
public String getGroupId() {
@@ -188,7 +219,7 @@ public class BulkExportJobParameters extends BulkExportJobBase {
return myOriginalRequestUrl;
}
- private void setOriginalRequestUrl(String theOriginalRequestUrl) {
+ public void setOriginalRequestUrl(String theOriginalRequestUrl) {
this.myOriginalRequestUrl = theOriginalRequestUrl;
}
@@ -200,4 +231,7 @@ public class BulkExportJobParameters extends BulkExportJobBase {
this.myPartitionId = thePartitionId;
}
+ public enum ExportStyle {
+ PATIENT, GROUP, SYSTEM
+ }
}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptor.java
index 7bafffc525e..929d0a00c57 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptor.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptor.java
@@ -28,7 +28,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.interceptor.consent.ConsentInterceptor;
import com.google.common.collect.Lists;
@@ -195,6 +195,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
this.myAuthorizationSearchParamMatcher = theAuthorizationSearchParamMatcher;
}
+ @Override
@Nullable
public IAuthorizationSearchParamMatcher getSearchParamMatcher() {
return myAuthorizationSearchParamMatcher;
@@ -410,7 +411,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
}
@Hook(Pointcut.STORAGE_INITIATE_BULK_EXPORT)
- public void initiateBulkExport(RequestDetails theRequestDetails, BulkDataExportOptions theBulkExportOptions, Pointcut thePointcut) {
+ public void initiateBulkExport(RequestDetails theRequestDetails, BulkExportJobParameters theBulkExportOptions, Pointcut thePointcut) {
// RestOperationTypeEnum restOperationType = determineRestOperationTypeFromBulkExportOptions(theBulkExportOptions);
RestOperationTypeEnum restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER;
@@ -424,18 +425,18 @@ public class AuthorizationInterceptor implements IRuleApplier {
* TODO GGG This method should eventually be used when invoking the rules applier.....however we currently rely on the incorrect
* behaviour of passing down `EXTENDED_OPERATION_SERVER`.
*/
- private RestOperationTypeEnum determineRestOperationTypeFromBulkExportOptions(BulkDataExportOptions theBulkExportOptions) {
+ private RestOperationTypeEnum determineRestOperationTypeFromBulkExportOptions(BulkExportJobParameters theBulkExportOptions) {
RestOperationTypeEnum restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER;
- BulkDataExportOptions.ExportStyle exportStyle = theBulkExportOptions.getExportStyle();
- if (exportStyle.equals(BulkDataExportOptions.ExportStyle.SYSTEM)) {
+ BulkExportJobParameters.ExportStyle exportStyle = theBulkExportOptions.getExportStyle();
+ if (exportStyle.equals(BulkExportJobParameters.ExportStyle.SYSTEM)) {
restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER;
- } else if (exportStyle.equals(BulkDataExportOptions.ExportStyle.PATIENT)) {
+ } else if (exportStyle.equals(BulkExportJobParameters.ExportStyle.PATIENT)) {
if (theBulkExportOptions.getPatientIds().size() == 1) {
restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE;
} else {
restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_TYPE;
}
- } else if (exportStyle.equals(BulkDataExportOptions.ExportStyle.GROUP)) {
+ } else if (exportStyle.equals(BulkExportJobParameters.ExportStyle.GROUP)) {
restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE;
}
return restOperationType;
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java
index b2e42fb967c..1d8839cb26e 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java
@@ -23,11 +23,12 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import java.util.Collection;
+import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@@ -40,7 +41,7 @@ public class RuleBulkExportImpl extends BaseRule {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RuleBulkExportImpl.class);
private String myGroupId;
private String myPatientId;
- private BulkDataExportOptions.ExportStyle myWantExportStyle;
+ private BulkExportJobParameters.ExportStyle myWantExportStyle;
private Collection myResourceTypes;
private boolean myWantAnyStyle;
@@ -58,7 +59,7 @@ public class RuleBulkExportImpl extends BaseRule {
return null;
}
- BulkDataExportOptions options = (BulkDataExportOptions) theRequestDetails.getAttribute(AuthorizationInterceptor.REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS);
+ BulkExportJobParameters options = (BulkExportJobParameters) theRequestDetails.getAttribute(AuthorizationInterceptor.REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS);
if (!myWantAnyStyle && options.getExportStyle() != myWantExportStyle) {
return null;
@@ -75,13 +76,13 @@ public class RuleBulkExportImpl extends BaseRule {
}
}
- if (myWantAnyStyle || myWantExportStyle == BulkDataExportOptions.ExportStyle.SYSTEM) {
+ if (myWantAnyStyle || myWantExportStyle == BulkExportJobParameters.ExportStyle.SYSTEM) {
return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier);
}
if (isNotBlank(myGroupId) && options.getGroupId() != null) {
String expectedGroupId = new IdDt(myGroupId).toUnqualifiedVersionless().getValue();
- String actualGroupId = options.getGroupId().toUnqualifiedVersionless().getValue();
+ String actualGroupId = new IdDt(options.getGroupId()).toUnqualifiedVersionless().getValue();
if (Objects.equals(expectedGroupId, actualGroupId)) {
return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier);
}
@@ -90,12 +91,12 @@ public class RuleBulkExportImpl extends BaseRule {
// TODO This is a _bad bad bad implementation_ but we are out of time.
// 1. If a claimed resource ID is present in the parameters, and the permission contains one, check for membership
// 2. If not a member, Deny.
- if (myWantExportStyle == BulkDataExportOptions.ExportStyle.PATIENT && isNotBlank(myPatientId)) {
+ if (myWantExportStyle == BulkExportJobParameters.ExportStyle.PATIENT && isNotBlank(myPatientId)) {
final String expectedPatientId = new IdDt(myPatientId).toUnqualifiedVersionless().getValue();
- if (options.getPatientIds() != null) {
+ if (!options.getPatientIds().isEmpty()) {
ourLog.debug("options.getPatientIds() != null");
final String actualPatientIds = options.getPatientIds().stream()
- .map(t -> t.toUnqualifiedVersionless().getValue())
+ .map(t -> new IdDt(t).toUnqualifiedVersionless().getValue())
.collect(Collectors.joining(","));
if (actualPatientIds.contains(expectedPatientId)) {
return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier);
@@ -104,7 +105,7 @@ public class RuleBulkExportImpl extends BaseRule {
return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY,this);
}
- final Set filters = options.getFilters();
+ final List filters = options.getFilters();
// TODO: LD: This admittedly adds more to the tech debt above, and should really be addressed by https://github.com/hapifhir/hapi-fhir/issues/4990
if (! filters.isEmpty()) {
@@ -126,22 +127,22 @@ public class RuleBulkExportImpl extends BaseRule {
}
public void setAppliesToGroupExportOnGroup(String theGroupId) {
- myWantExportStyle = BulkDataExportOptions.ExportStyle.GROUP;
+ myWantExportStyle = BulkExportJobParameters.ExportStyle.GROUP;
myGroupId = theGroupId;
}
public void setAppliesToPatientExportOnGroup(String theGroupId) {
- myWantExportStyle = BulkDataExportOptions.ExportStyle.PATIENT;
+ myWantExportStyle = BulkExportJobParameters.ExportStyle.PATIENT;
myGroupId = theGroupId;
}
public void setAppliesToPatientExport(String thePatientId) {
- myWantExportStyle = BulkDataExportOptions.ExportStyle.PATIENT;
+ myWantExportStyle = BulkExportJobParameters.ExportStyle.PATIENT;
myPatientId = thePatientId;
}
public void setAppliesToSystem() {
- myWantExportStyle = BulkDataExportOptions.ExportStyle.SYSTEM;
+ myWantExportStyle = BulkExportJobParameters.ExportStyle.SYSTEM;
}
public void setResourceTypes(Collection theResourceTypes) {
@@ -156,7 +157,7 @@ public class RuleBulkExportImpl extends BaseRule {
return myGroupId;
}
- BulkDataExportOptions.ExportStyle getWantExportStyle() {
+ BulkExportJobParameters.ExportStyle getWantExportStyle() {
return myWantExportStyle;
}
}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/JsonDateDeserializer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateDeserializer.java
similarity index 88%
rename from hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/JsonDateDeserializer.java
rename to hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateDeserializer.java
index 8def38a9692..cc0be2c96c3 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/JsonDateDeserializer.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateDeserializer.java
@@ -1,6 +1,6 @@
/*-
* #%L
- * HAPI FHIR Storage api
+ * HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
@@ -17,12 +17,12 @@
* limitations under the License.
* #L%
*/
-package ca.uhn.fhir.jpa.util;
+package ca.uhn.fhir.rest.server.util;
+import ca.uhn.fhir.model.primitive.DateTimeDt;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
-import org.hl7.fhir.dstu3.model.DateTimeType;
import java.io.IOException;
import java.util.Date;
@@ -35,7 +35,7 @@ public class JsonDateDeserializer extends JsonDeserializer {
public Date deserialize(JsonParser theParser, DeserializationContext theDeserializationContext) throws IOException {
String string = theParser.getValueAsString();
if (isNotBlank(string)) {
- return new DateTimeType(string).getValue();
+ return new DateTimeDt(string).getValue();
}
return null;
}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/JsonDateSerializer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateSerializer.java
similarity index 82%
rename from hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/JsonDateSerializer.java
rename to hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateSerializer.java
index 564dc68d43b..4b1e176fb52 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/JsonDateSerializer.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateSerializer.java
@@ -1,6 +1,6 @@
/*-
* #%L
- * HAPI FHIR Storage api
+ * HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
@@ -17,12 +17,13 @@
* limitations under the License.
* #L%
*/
-package ca.uhn.fhir.jpa.util;
+package ca.uhn.fhir.rest.server.util;
+import ca.uhn.fhir.model.primitive.DateTimeDt;
+import ca.uhn.fhir.model.primitive.InstantDt;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
-import org.hl7.fhir.dstu3.model.InstantType;
import java.io.IOException;
import java.util.Date;
@@ -32,7 +33,7 @@ public class JsonDateSerializer extends JsonSerializer {
@Override
public void serialize(Date theValue, JsonGenerator theGen, SerializerProvider theSerializers) throws IOException {
if (theValue != null) {
- theGen.writeString(new InstantType(theValue).getValueAsString());
+ theGen.writeString(new InstantDt(theValue).getValueAsString());
}
}
diff --git a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImplTest.java b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImplTest.java
index 8f7d1840d24..2624d7b23af 100644
--- a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImplTest.java
+++ b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImplTest.java
@@ -1,10 +1,9 @@
package ca.uhn.fhir.rest.server.interceptor.auth;
import ca.uhn.fhir.interceptor.api.Pointcut;
-import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
@@ -41,7 +40,7 @@ public class RuleBulkExportImplTest {
Set myWantTypes = new HashSet<>();
myWantTypes.add("Questionnaire");
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(myWantTypes);
when(myRequestDetails.getAttribute(any())).thenReturn(options);
@@ -63,7 +62,7 @@ public class RuleBulkExportImplTest {
myWantTypes.add("Patient");
myWantTypes.add("Practitioner");
- BulkDataExportOptions options = new BulkDataExportOptions();
+ BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(myWantTypes);
when(myRequestDetails.getAttribute(any())).thenReturn(options);
@@ -78,9 +77,9 @@ public class RuleBulkExportImplTest {
myRule.setAppliesToGroupExportOnGroup("invalid group");
myRule.setMode(PolicyEnum.ALLOW);
- BulkDataExportOptions options = new BulkDataExportOptions();
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
- options.setGroupId(new IdDt("Group/123"));
+ BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
+ options.setGroupId("Group/123");
when(myRequestDetails.getAttribute(any())).thenReturn(options);
@@ -94,9 +93,9 @@ public class RuleBulkExportImplTest {
myRule.setAppliesToGroupExportOnGroup("Group/1");
myRule.setMode(PolicyEnum.ALLOW);
- BulkDataExportOptions options = new BulkDataExportOptions();
- options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
- options.setGroupId(new IdDt("Group/1"));
+ BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
+ options.setGroupId("Group/1");
when(myRequestDetails.getAttribute(any())).thenReturn(options);
@@ -110,9 +109,9 @@ public class RuleBulkExportImplTest {
RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
myRule.setAppliesToPatientExport("Patient/123");
myRule.setMode(PolicyEnum.ALLOW);
- BulkDataExportOptions options = new BulkDataExportOptions();
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
- options.setPatientIds(Set.of(new IdDt("Patient/123")));
+ BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
+ options.setPatientIds(Set.of("Patient/123"));
when(myRequestDetails.getAttribute(any())).thenReturn(options);
//When
@@ -128,9 +127,9 @@ public class RuleBulkExportImplTest {
RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
myRule.setAppliesToPatientExport("Patient/123");
myRule.setMode(PolicyEnum.ALLOW);
- BulkDataExportOptions options = new BulkDataExportOptions();
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
- options.setPatientIds(Set.of(new IdDt("Patient/456")));
+ BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
+ options.setPatientIds(Set.of("Patient/456"));
when(myRequestDetails.getAttribute(any())).thenReturn(options);
//When
@@ -146,8 +145,8 @@ public class RuleBulkExportImplTest {
RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
myRule.setAppliesToPatientExport("Patient/123");
myRule.setMode(PolicyEnum.ALLOW);
- BulkDataExportOptions options = new BulkDataExportOptions();
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
when(myRequestDetails.getAttribute(any())).thenReturn(options);
//When
@@ -163,8 +162,8 @@ public class RuleBulkExportImplTest {
final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
myRule.setAppliesToPatientExport("Patient/123");
myRule.setMode(PolicyEnum.ALLOW);
- final BulkDataExportOptions options = new BulkDataExportOptions();
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ final BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setFilters(Set.of("Patient?_id=123"));
options.setResourceTypes(Set.of("Patient"));
when(myRequestDetails.getAttribute(any())).thenReturn(options);
@@ -182,8 +181,8 @@ public class RuleBulkExportImplTest {
final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
myRule.setAppliesToPatientExport("Patient/123");
myRule.setMode(PolicyEnum.ALLOW);
- final BulkDataExportOptions options = new BulkDataExportOptions();
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ final BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setFilters(Set.of("Patient?_id=123"));
options.setResourceTypes(Set.of("Patient", "Condition", "Immunization"));
when(myRequestDetails.getAttribute(any())).thenReturn(options);
@@ -201,8 +200,8 @@ public class RuleBulkExportImplTest {
final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
myRule.setAppliesToPatientExport("Patient/123");
myRule.setMode(PolicyEnum.ALLOW);
- final BulkDataExportOptions options = new BulkDataExportOptions();
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ final BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setFilters(Set.of("Patient?_id=123"));
options.setResourceTypes(Set.of("Observation"));
when(myRequestDetails.getAttribute(any())).thenReturn(options);
@@ -220,8 +219,8 @@ public class RuleBulkExportImplTest {
final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
myRule.setAppliesToPatientExport("Patient/123");
myRule.setMode(PolicyEnum.ALLOW);
- final BulkDataExportOptions options = new BulkDataExportOptions();
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ final BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setFilters(Set.of("Patient?_id=123"));
when(myRequestDetails.getAttribute(any())).thenReturn(options);
@@ -238,8 +237,8 @@ public class RuleBulkExportImplTest {
final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
myRule.setAppliesToPatientExport("Patient/123");
myRule.setMode(PolicyEnum.ALLOW);
- final BulkDataExportOptions options = new BulkDataExportOptions();
- options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ final BulkExportJobParameters options = new BulkExportJobParameters();
+ options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setFilters(Set.of("Patient?_id=456"));
options.setResourceTypes(Set.of("Patient"));
when(myRequestDetails.getAttribute(any())).thenReturn(options);
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
index 0d20bde6076..ce6320bf3cf 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
index 00793059d4f..e6b33f740a4 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
@@ -21,7 +21,7 @@
ca.uhn.hapi.fhir
hapi-fhir-caching-api
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
index bd641c54a72..9335c0a9417 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
index 0eb41e2c8df..b0a4b034bcb 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir
ca.uhn.hapi.fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../../pom.xml
diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml
index 4e73bc49a23..030afbf06eb 100644
--- a/hapi-fhir-serviceloaders/pom.xml
+++ b/hapi-fhir-serviceloaders/pom.xml
@@ -5,7 +5,7 @@
hapi-deployable-pom
ca.uhn.hapi.fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
index 3e6ca302e86..90ce7c41cc1 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
index cbc3bea54ac..7b2768e3b06 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
hapi-fhir-spring-boot-sample-client-apache
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
index f8032b6eb27..2a1ad54b5a0 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
index e4cabb8e0e5..c7572bb43a5 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
index 85a18ca835a..1ef53facc77 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
index 54ec994a71a..fb0e7899832 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml
index 7f5e11a4637..e91f6248bf1 100644
--- a/hapi-fhir-spring-boot/pom.xml
+++ b/hapi-fhir-spring-boot/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml
index 6ffd69ebe51..81fc6daba2c 100644
--- a/hapi-fhir-sql-migrate/pom.xml
+++ b/hapi-fhir-sql-migrate/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml
index e31c8325727..5a3fcc03db1 100644
--- a/hapi-fhir-storage-batch2-jobs/pom.xml
+++ b/hapi-fhir-storage-batch2-jobs/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/BatchCommonCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/BatchCommonCtx.java
index 8077fdaa254..508b9ccd72b 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/BatchCommonCtx.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/BatchCommonCtx.java
@@ -19,11 +19,7 @@
*/
package ca.uhn.fhir.batch2.jobs.config;
-import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.parameters.UrlPartitioner;
-import ca.uhn.fhir.batch2.jobs.services.Batch2JobRunnerImpl;
-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.context.annotation.Bean;
@@ -34,8 +30,4 @@ public class BatchCommonCtx {
return new UrlPartitioner(theMatchUrlService, theRequestPartitionHelperSvc);
}
- @Bean
- public IBatch2JobRunner batch2JobRunner(IJobCoordinator theJobCoordinator, FhirContext theFhirContext) {
- return new Batch2JobRunnerImpl(theJobCoordinator, theFhirContext);
- }
}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java
similarity index 73%
rename from hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java
rename to hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java
index 2846a8e7415..6603e3c293f 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java
@@ -1,6 +1,6 @@
/*-
* #%L
- * HAPI FHIR Storage api
+ * hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
@@ -17,8 +17,13 @@
* limitations under the License.
* #L%
*/
-package ca.uhn.fhir.jpa.bulk.export.provider;
+package ca.uhn.fhir.batch2.jobs.export;
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.api.JobOperationResultJson;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
+import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
@@ -27,17 +32,11 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
-import ca.uhn.fhir.jpa.api.model.Batch2JobOperationResult;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
-import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.annotation.IdParam;
@@ -47,28 +46,21 @@ import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.PreferHeader;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
-import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
-import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
-import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
-import ca.uhn.fhir.rest.server.interceptor.auth.IRuleApplier;
-import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.ArrayUtil;
+import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.SearchParameterUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Lists;
-import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -83,6 +75,7 @@ import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
@@ -91,16 +84,18 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
-import static org.apache.commons.collections4.CollectionUtils.isEmpty;
+import static ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters.ExportStyle;
+import static ca.uhn.fhir.util.DatatypeUtil.toStringValue;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.slf4j.LoggerFactory.getLogger;
-
public class BulkDataExportProvider {
public static final String FARM_TO_TABLE_TYPE_FILTER_REGEX = "(?:,)(?=[A-Z][a-z]+\\?)";
public static final List PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES = List.of("Practitioner", "Organization");
- /** Bulk data $export does not include the Binary type */
+ /**
+ * Bulk data $export does not include the Binary type
+ */
public static final String UNSUPPORTED_BINARY_TYPE = "Binary";
private static final Logger ourLog = getLogger(BulkDataExportProvider.class);
@@ -114,7 +109,7 @@ public class BulkDataExportProvider {
private FhirContext myFhirContext;
@Autowired
- private IBatch2JobRunner myJobRunner;
+ private IJobCoordinator myJobCoordinator;
@Autowired
private JpaStorageSettings myStorageSettings;
@@ -141,15 +136,15 @@ public class BulkDataExportProvider {
// JPA export provider
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
- BulkDataExportOptions bulkDataExportOptions = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, theTypePostFetchFilterUrl);
+ BulkExportJobParameters bulkExportJobParameters = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, theTypePostFetchFilterUrl);
- startJob(theRequestDetails, bulkDataExportOptions);
+ startJob(theRequestDetails, bulkExportJobParameters);
}
private void startJob(ServletRequestDetails theRequestDetails,
- BulkDataExportOptions theOptions) {
+ BulkExportJobParameters theOptions) {
// permission check
- HookParams params = (new HookParams()).add(BulkDataExportOptions.class, theOptions)
+ HookParams params = (new HookParams()).add(BulkExportJobParameters.class, theOptions)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
CompositeInterceptorBroadcaster.doCallHooks(this.myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_INITIATE_BULK_EXPORT, params);
@@ -157,36 +152,29 @@ public class BulkDataExportProvider {
// get cache boolean
boolean useCache = shouldUseCache(theRequestDetails);
- BulkExportParameters parameters = BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions);
- parameters.setUseExistingJobsFirst(useCache);
-
// Set the original request URL as part of the job information, as this is used in the poll-status-endpoint, and is needed for the report.
- parameters.setOriginalRequestUrl(theRequestDetails.getCompleteUrl());
+ theOptions.setOriginalRequestUrl(theRequestDetails.getCompleteUrl());
// If no _type parameter is provided, default to all resource types except Binary
- if (isEmpty(theOptions.getResourceTypes())) {
+ if (theOptions.getResourceTypes().isEmpty()) {
List resourceTypes = new ArrayList<>(myDaoRegistry.getRegisteredDaoTypes());
resourceTypes.remove(UNSUPPORTED_BINARY_TYPE);
- parameters.setResourceTypes(resourceTypes);
+ theOptions.setResourceTypes(resourceTypes);
}
// Determine and validate partition permissions (if needed).
RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null);
myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId);
- parameters.setPartitionId(partitionId);
+ theOptions.setPartitionId(partitionId);
// start job
- Batch2JobStartResponse response = myJobRunner.startNewJob(theRequestDetails, parameters);
+ JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+ startRequest.setParameters(theOptions);
+ startRequest.setUseCache(useCache);
+ startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
+ Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, startRequest);
- JobInfo info = new JobInfo();
- info.setJobMetadataId(response.getInstanceId());
-
- // We set it to submitted, even if it's using a cached job
- // This isn't an issue because the actual status isn't used
- // instead, when they poll for results, they'll get the real one
- info.setStatus(BulkExportJobStatusEnum.SUBMITTED);
-
- writePollingLocationToResponseHeaders(theRequestDetails, info);
+ writePollingLocationToResponseHeaders(theRequestDetails, response.getInstanceId());
}
private boolean shouldUseCache(ServletRequestDetails theRequestDetails) {
@@ -195,7 +183,7 @@ public class BulkDataExportProvider {
}
private String getServerBase(ServletRequestDetails theRequestDetails) {
- return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/");
+ return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/");
}
/**
@@ -224,16 +212,18 @@ public class BulkDataExportProvider {
// verify the Group exists before starting the job
validateTargetsExists(theRequestDetails, "Group", List.of(theIdParam));
- BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm, theExportIdentifier, theTypePostFetchFilterUrl);
+ BulkExportJobParameters bulkExportJobParameters = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm, theExportIdentifier, theTypePostFetchFilterUrl);
- if (isNotEmpty(bulkDataExportOptions.getResourceTypes())) {
- validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
+ if (isNotEmpty(bulkExportJobParameters.getResourceTypes())) {
+ validateResourceTypesAllContainPatientSearchParams(bulkExportJobParameters.getResourceTypes());
} else {
// all patient resource types
- bulkDataExportOptions.setResourceTypes(getPatientCompartmentResources());
+ Set groupTypes = new HashSet<>(getPatientCompartmentResources());
+ groupTypes.removeIf(t -> !myDaoRegistry.isResourceTypeSupported(t));
+ bulkExportJobParameters.setResourceTypes(groupTypes);
}
- startJob(theRequestDetails, bulkDataExportOptions);
+ startJob(theRequestDetails, bulkExportJobParameters);
}
/**
@@ -247,14 +237,14 @@ public class BulkDataExportProvider {
private void validateTargetsExists(RequestDetails theRequestDetails, String theTargetResourceName, Iterable theIdParams) {
RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequestDetails, theTargetResourceName, theIdParams.iterator().next());
SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId);
- for (IIdType nextId: theIdParams) {
+ for (IIdType nextId : theIdParams) {
myDaoRegistry.getResourceDao(theTargetResourceName)
.read(nextId, requestDetails);
}
}
- private void validateResourceTypesAllContainPatientSearchParams(Set theResourceTypes) {
+ private void validateResourceTypesAllContainPatientSearchParams(Collection theResourceTypes) {
if (theResourceTypes != null) {
List badResourceTypes = theResourceTypes.stream()
.filter(resourceType -> !PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(resourceType))
@@ -292,13 +282,13 @@ public class BulkDataExportProvider {
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
if (thePatient != null) {
- validateTargetsExists(theRequestDetails, "Patient", Lists.transform(thePatient, s -> new IdDt(s.getValue())));
+ validateTargetsExists(theRequestDetails, "Patient", thePatient.stream().map(s -> new IdDt(s.getValue())).collect(Collectors.toList()));
}
- BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, thePatient, theTypePostFetchFilterUrl);
- validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
+ BulkExportJobParameters bulkExportJobParameters = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, thePatient, theTypePostFetchFilterUrl);
+ validateResourceTypesAllContainPatientSearchParams(bulkExportJobParameters.getResourceTypes());
- startJob(theRequestDetails, bulkDataExportOptions);
+ startJob(theRequestDetails, bulkExportJobParameters);
}
/**
@@ -319,15 +309,16 @@ public class BulkDataExportProvider {
validateTargetsExists(theRequestDetails, "Patient", List.of(theIdParam));
- BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, theIdParam, theTypePostFetchFilterUrl);
- validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
+ BulkExportJobParameters bulkExportJobParameters = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, theIdParam, theTypePostFetchFilterUrl);
+ validateResourceTypesAllContainPatientSearchParams(bulkExportJobParameters.getResourceTypes());
- startJob(theRequestDetails, bulkDataExportOptions);
+ startJob(theRequestDetails, bulkExportJobParameters);
}
/**
* $export-poll-status
*/
+ @SuppressWarnings("unchecked")
@Operation(name = JpaConstants.OPERATION_EXPORT_POLL_STATUS, manualResponse = true, idempotent = true, deleteEnabled = true)
public void exportPollStatus(
@OperationParam(name = JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, typeName = "string", min = 0, max = 1) IPrimitiveType theJobId,
@@ -338,7 +329,7 @@ public class BulkDataExportProvider {
// When export-poll-status through POST
// Get theJobId from the request details
- if (theJobId == null){
+ if (theJobId == null) {
Parameters parameters = (Parameters) theRequestDetails.getResource();
Parameters.ParametersParameterComponent parameter = parameters.getParameter().stream()
.filter(param -> param.getName().equals(JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID))
@@ -347,22 +338,20 @@ public class BulkDataExportProvider {
theJobId = (IPrimitiveType) parameter.getValue();
}
- Batch2JobInfo info = myJobRunner.getJobInfo(theJobId.getValueAsString());
- if (info == null) {
- throw new ResourceNotFoundException(Msg.code(2040) + "Unknown instance ID: " + theJobId + ". Please check if the input job ID is valid.");
- }
+ JobInstance info = myJobCoordinator.getInstance(theJobId.getValueAsString());
- if(info.getRequestPartitionId() != null) {
+ BulkExportJobParameters parameters = info.getParameters(BulkExportJobParameters.class);
+ if (parameters.getPartitionId() != null) {
// Determine and validate permissions for partition (if needed)
RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null);
myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId);
- if(!info.getRequestPartitionId().equals(partitionId)){
+ if (!parameters.getPartitionId().equals(partitionId)) {
throw new InvalidRequestException(Msg.code(2304) + "Invalid partition in request for Job ID " + theJobId);
}
}
switch (info.getStatus()) {
- case COMPLETE:
+ case COMPLETED:
if (theRequestDetails.getRequestType() == RequestTypeEnum.DELETE) {
handleDeleteRequest(theJobId, response, info.getStatus());
} else {
@@ -405,14 +394,14 @@ public class BulkDataExportProvider {
}
}
break;
- case ERROR:
+ case FAILED:
response.setStatus(Constants.STATUS_HTTP_500_INTERNAL_ERROR);
response.setContentType(Constants.CT_FHIR_JSON);
// Create an OperationOutcome response
IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(myFhirContext);
- OperationOutcomeUtil.addIssue(myFhirContext, oo, "error", info.getErrorMsg(), null, null);
+ OperationOutcomeUtil.addIssue(myFhirContext, oo, "error", info.getErrorMessage(), null, null);
myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(oo, response.getWriter());
response.getWriter().close();
break;
@@ -420,8 +409,11 @@ public class BulkDataExportProvider {
// Deliberate fall through
ourLog.warn("Unrecognized status encountered: {}. Treating as BUILDING/SUBMITTED", info.getStatus().name());
//noinspection fallthrough
- case BUILDING:
- case SUBMITTED:
+ case FINALIZE:
+ case QUEUED:
+ case IN_PROGRESS:
+ case CANCELLED:
+ case ERRORED:
if (theRequestDetails.getRequestType() == RequestTypeEnum.DELETE) {
handleDeleteRequest(theJobId, response, info.getStatus());
} else {
@@ -437,10 +429,10 @@ public class BulkDataExportProvider {
}
}
- private void handleDeleteRequest(IPrimitiveType theJobId, HttpServletResponse response, BulkExportJobStatusEnum theOrigStatus) throws IOException {
+ private void handleDeleteRequest(IPrimitiveType theJobId, HttpServletResponse response, StatusEnum theOrigStatus) throws IOException {
IBaseOperationOutcome outcome = OperationOutcomeUtil.newInstance(myFhirContext);
- Batch2JobOperationResult resultMessage = myJobRunner.cancelInstance(theJobId.getValueAsString());
- if (theOrigStatus.equals(BulkExportJobStatusEnum.COMPLETE)) {
+ JobOperationResultJson resultMessage = myJobCoordinator.cancelInstance(theJobId.getValueAsString());
+ if (theOrigStatus.equals(StatusEnum.COMPLETED)) {
response.setStatus(Constants.STATUS_HTTP_404_NOT_FOUND);
OperationOutcomeUtil.addIssue(myFhirContext, outcome, "error", "Job instance <" + theJobId.getValueAsString() + "> was already cancelled or has completed. Nothing to do.", null, null);
} else {
@@ -451,7 +443,7 @@ public class BulkDataExportProvider {
response.getWriter().close();
}
- private String getTransitionTimeOfJobInfo(Batch2JobInfo theInfo) {
+ private String getTransitionTimeOfJobInfo(JobInstance theInfo) {
if (theInfo.getEndTime() != null) {
return new InstantType(theInfo.getEndTime()).getValueAsString();
} else if (theInfo.getStartTime() != null) {
@@ -462,43 +454,43 @@ public class BulkDataExportProvider {
}
}
- private BulkDataExportOptions buildSystemBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportId, List> theTypePostFetchFilterUrl) {
- return buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkDataExportOptions.ExportStyle.SYSTEM, theTypePostFetchFilterUrl);
+ private BulkExportJobParameters buildSystemBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportId, List> theTypePostFetchFilterUrl) {
+ return buildBulkExportJobParameters(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkExportJobParameters.ExportStyle.SYSTEM, theTypePostFetchFilterUrl);
}
- private BulkDataExportOptions buildGroupBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IIdType theGroupId, IPrimitiveType theExpandMdm, IPrimitiveType theExportId, List> theTypePostFetchFilterUrl) {
- BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkDataExportOptions.ExportStyle.GROUP, theTypePostFetchFilterUrl);
- bulkDataExportOptions.setGroupId(theGroupId);
+ private BulkExportJobParameters buildGroupBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IIdType theGroupId, IPrimitiveType theExpandMdm, IPrimitiveType theExportId, List> theTypePostFetchFilterUrl) {
+ BulkExportJobParameters bulkExportJobParameters = buildBulkExportJobParameters(theOutputFormat, theType, theSince, theTypeFilter, theExportId, ExportStyle.GROUP, theTypePostFetchFilterUrl);
+ bulkExportJobParameters.setGroupId(toStringValue(theGroupId));
boolean mdm = false;
if (theExpandMdm != null) {
mdm = theExpandMdm.getValue();
}
- bulkDataExportOptions.setExpandMdm(mdm);
+ bulkExportJobParameters.setExpandMdm(mdm);
- return bulkDataExportOptions;
+ return bulkExportJobParameters;
}
- private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, List> thePatientIds, List> theTypePostFetchFilterUrl) {
+ private BulkExportJobParameters buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, List> thePatientIds, List> theTypePostFetchFilterUrl) {
IPrimitiveType type = theType;
if (type == null) {
// Type is optional, but the job requires it
type = new StringDt("Patient");
}
- BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, type, theSince, theTypeFilter, theExportIdentifier, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl);
+ BulkExportJobParameters bulkExportJobParameters = buildBulkExportJobParameters(theOutputFormat, type, theSince, theTypeFilter, theExportIdentifier, ExportStyle.PATIENT, theTypePostFetchFilterUrl);
if (thePatientIds != null) {
- bulkDataExportOptions.setPatientIds(thePatientIds.stream().map((pid) -> new IdType(pid.getValueAsString())).collect(Collectors.toSet()));
+ bulkExportJobParameters.setPatientIds(thePatientIds.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toSet()));
}
- return bulkDataExportOptions;
+ return bulkExportJobParameters;
}
- private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, IIdType thePatientId, List> theTypePostFetchFilterUrl) {
- BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl);
- bulkDataExportOptions.setPatientIds(Collections.singleton(thePatientId));
- return bulkDataExportOptions;
+ private BulkExportJobParameters buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, IIdType thePatientId, List> theTypePostFetchFilterUrl) {
+ BulkExportJobParameters bulkExportJobParameters = buildBulkExportJobParameters(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, ExportStyle.PATIENT, theTypePostFetchFilterUrl);
+ bulkExportJobParameters.setPatientIds(Collections.singleton(thePatientId.getValue()));
+ return bulkExportJobParameters;
}
- private BulkDataExportOptions buildBulkDataExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, BulkDataExportOptions.ExportStyle theExportStyle, List> theTypePostFetchFilterUrl) {
+ private BulkExportJobParameters buildBulkExportJobParameters(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, BulkExportJobParameters.ExportStyle theExportStyle, List> theTypePostFetchFilterUrl) {
String outputFormat = theOutputFormat != null ? theOutputFormat.getValueAsString() : Constants.CT_FHIR_NDJSON;
Set resourceTypes = null;
@@ -518,23 +510,23 @@ public class BulkDataExportProvider {
Set typeFilters = splitTypeFilters(theTypeFilter);
Set typePostFetchFilterUrls = splitTypeFilters(theTypePostFetchFilterUrl);
- BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
- bulkDataExportOptions.setFilters(typeFilters);
- bulkDataExportOptions.setPostFetchFilterUrls(typePostFetchFilterUrls);
- bulkDataExportOptions.setExportStyle(theExportStyle);
- bulkDataExportOptions.setExportIdentifier(exportIdentifier);
- bulkDataExportOptions.setSince(since);
- bulkDataExportOptions.setResourceTypes(resourceTypes);
- bulkDataExportOptions.setOutputFormat(outputFormat);
- return bulkDataExportOptions;
+ BulkExportJobParameters bulkExportJobParameters = new BulkExportJobParameters();
+ bulkExportJobParameters.setFilters(typeFilters);
+ bulkExportJobParameters.setPostFetchFilterUrls(typePostFetchFilterUrls);
+ bulkExportJobParameters.setExportStyle(theExportStyle);
+ bulkExportJobParameters.setExportIdentifier(exportIdentifier);
+ bulkExportJobParameters.setSince(since);
+ bulkExportJobParameters.setResourceTypes(resourceTypes);
+ bulkExportJobParameters.setOutputFormat(outputFormat);
+ return bulkExportJobParameters;
}
- public void writePollingLocationToResponseHeaders(ServletRequestDetails theRequestDetails, JobInfo theOutcome) {
+ public void writePollingLocationToResponseHeaders(ServletRequestDetails theRequestDetails, String theInstanceId) {
String serverBase = getServerBase(theRequestDetails);
if (serverBase == null) {
throw new InternalErrorException(Msg.code(2136) + "Unable to get the server base.");
}
- String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + theOutcome.getJobMetadataId();
+ String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + theInstanceId;
pollLocation = UrlUtil.sanitizeHeaderValue(pollLocation);
HttpServletResponse response = theRequestDetails.getServletResponse();
@@ -559,20 +551,12 @@ public class BulkDataExportProvider {
Arrays
.stream(typeFilterString.split(FARM_TO_TABLE_TYPE_FILTER_REGEX))
.filter(StringUtils::isNotBlank)
- .forEach(t -> retVal.add(t));
+ .forEach(retVal::add);
}
return retVal;
}
- public static void validatePreferAsyncHeader(ServletRequestDetails theRequestDetails, String theOperationName) {
- String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER);
- PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader);
- if (!prefer.getRespondAsync()) {
- throw new InvalidRequestException(Msg.code(513) + "Must request async processing for " + theOperationName);
- }
- }
-
@VisibleForTesting
public void setStorageSettings(JpaStorageSettings theStorageSettings) {
myStorageSettings = theStorageSettings;
@@ -582,4 +566,12 @@ public class BulkDataExportProvider {
public void setDaoRegistry(DaoRegistry theDaoRegistry) {
myDaoRegistry = theDaoRegistry;
}
+
+ public static void validatePreferAsyncHeader(ServletRequestDetails theRequestDetails, String theOperationName) {
+ String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER);
+ PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader);
+ if (!prefer.getRespondAsync()) {
+ throw new InvalidRequestException(Msg.code(513) + "Must request async processing for " + theOperationName);
+ }
+ }
}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java
index c41295a8b01..2c4b1a27fb8 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java
@@ -23,7 +23,7 @@ import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.model.JobDefinition;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.model.api.IModelJson;
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java
index 629c4e3c6ed..aea4759ae84 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java
@@ -27,7 +27,7 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.model.ChunkOutcome;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java
index e0114a6d70d..7c1ff82abe7 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java
@@ -20,15 +20,13 @@
package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.batch2.api.IJobParametersValidator;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
@@ -83,7 +81,7 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator
}
// validate for group
- BulkDataExportOptions.ExportStyle style = theParameters.getExportStyle();
+ BulkExportJobParameters.ExportStyle style = theParameters.getExportStyle();
if (style == null) {
errorMsgs.add("Export style is required");
}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportUtil.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportUtil.java
deleted file mode 100644
index 96f8278d16d..00000000000
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportUtil.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*-
- * #%L
- * hapi-fhir-storage-batch2-jobs
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.batch2.jobs.export;
-
-import ca.uhn.fhir.batch2.model.StatusEnum;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
-import org.slf4j.Logger;
-
-import static org.slf4j.LoggerFactory.getLogger;
-
-public class BulkExportUtil {
- private static final Logger ourLog = getLogger(BulkExportUtil.class);
-
- private BulkExportUtil() {
-
- }
-
- /**
- * Converts Batch2 StatusEnum -> BulkExportJobStatusEnum
- */
- public static BulkExportJobStatusEnum fromBatchStatus(StatusEnum status) {
- switch (status) {
- case QUEUED:
- case FINALIZE:
- return BulkExportJobStatusEnum.SUBMITTED;
- case COMPLETED :
- return BulkExportJobStatusEnum.COMPLETE;
- case ERRORED:
- case IN_PROGRESS:
- return BulkExportJobStatusEnum.BUILDING;
- default:
- ourLog.warn("Unrecognized status {}; treating as FAILED/CANCELLED/ERRORED", status.name());
- case FAILED:
- case CANCELLED:
- return BulkExportJobStatusEnum.ERROR;
- }
- }
-}
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java
index e1bd48781df..b0bf68a051c 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java
@@ -24,7 +24,10 @@ import ca.uhn.fhir.batch2.api.IJobStepWorker;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.executor.InterceptorService;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.context.FhirContext;
@@ -90,6 +93,9 @@ public class ExpandResourcesStep implements IJobStepWorker t.substring(0, t.indexOf('?')).equals(resourceType))
.collect(Collectors.toList());
+
if (!postFetchFilterUrls.isEmpty()) {
applyPostFetchFiltering(allResources, postFetchFilterUrls, instanceId, chunkId);
}
@@ -132,6 +139,19 @@ public class ExpandResourcesStep implements IJobStepWorker iter = allResources.iterator(); iter.hasNext(); ) {
+ HookParams params = new HookParams()
+ .add(BulkExportJobParameters.class, theStepExecutionDetails.getParameters())
+ .add(IBaseResource.class, iter.next());
+ boolean outcome = myInterceptorService.callHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION, params);
+ if (!outcome) {
+ iter.remove();
+ }
+ }
+ }
+
// encode them - Key is resource type, Value is a collection of serialized resources of that type
ListMultimap resources = encodeToString(allResources, parameters);
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java
index 66517ed5dc7..4e591a90cf8 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java
@@ -25,7 +25,7 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.api.VoidModel;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.jobs.models.BatchResourceId;
import ca.uhn.fhir.i18n.Msg;
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java
index bc7f85088c2..fd06677bc9a 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java
@@ -25,7 +25,7 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProvider.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProvider.java
index ac08549e993..9c886a1fa3d 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProvider.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProvider.java
@@ -59,7 +59,7 @@ import java.util.Date;
import java.util.List;
import java.util.Optional;
-import static ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider.validatePreferAsyncHeader;
+import static ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider.validatePreferAsyncHeader;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class BulkDataImportProvider {
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java
deleted file mode 100644
index 0515c23d08c..00000000000
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImpl.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*-
- * #%L
- * hapi-fhir-storage-batch2-jobs
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.batch2.jobs.services;
-
-import ca.uhn.fhir.batch2.api.IJobCoordinator;
-import ca.uhn.fhir.batch2.api.JobOperationResultJson;
-import ca.uhn.fhir.batch2.jobs.export.BulkExportUtil;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
-import ca.uhn.fhir.batch2.model.JobInstance;
-import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.i18n.Msg;
-import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
-import ca.uhn.fhir.jpa.api.model.Batch2JobOperationResult;
-import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
-import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
-import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
-import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
-import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
-import org.slf4j.Logger;
-
-import javax.annotation.Nonnull;
-
-import static org.slf4j.LoggerFactory.getLogger;
-
-public class Batch2JobRunnerImpl implements IBatch2JobRunner {
- private static final Logger ourLog = getLogger(IBatch2JobRunner.class);
-
- private final IJobCoordinator myJobCoordinator;
-
- private final FhirContext myFhirContext;
-
- public Batch2JobRunnerImpl(IJobCoordinator theJobCoordinator, FhirContext theFhirContext) {
- myFhirContext = theFhirContext;
- myJobCoordinator = theJobCoordinator;
- }
-
- @Override
- public Batch2JobStartResponse startNewJob(RequestDetails theRequestDetails, Batch2BaseJobParameters theParameters) {
- switch (theParameters.getJobDefinitionId()) {
- case Batch2JobDefinitionConstants.BULK_EXPORT:
- if (theParameters instanceof BulkExportParameters) {
- return startBatch2BulkExportJob(theRequestDetails, (BulkExportParameters) theParameters);
- }
- else {
- ourLog.error("Invalid parameters for " + Batch2JobDefinitionConstants.BULK_EXPORT);
- }
- break;
- default:
- // Dear future devs - add your case above
- ourLog.error("Invalid JobDefinitionId " + theParameters.getJobDefinitionId());
- break;
- }
- return null;
- }
-
- @Override
- public Batch2JobInfo getJobInfo(String theJobId) {
- JobInstance instance = myJobCoordinator.getInstance(theJobId);
- if (instance == null) {
- throw new ResourceNotFoundException(Msg.code(2240) + " : " + theJobId);
- }
- return fromJobInstanceToBatch2JobInfo(instance);
- }
-
- @Override
- public Batch2JobOperationResult cancelInstance(String theJobId) throws ResourceNotFoundException {
- JobOperationResultJson cancelResult = myJobCoordinator.cancelInstance(theJobId);
- if (cancelResult == null) {
- throw new ResourceNotFoundException(Msg.code(2195) + " : " + theJobId);
- }
- return fromJobOperationResultToBatch2JobOperationResult(cancelResult);
- }
-
- private Batch2JobOperationResult fromJobOperationResultToBatch2JobOperationResult(@Nonnull JobOperationResultJson theResultJson) {
- Batch2JobOperationResult result = new Batch2JobOperationResult();
- result.setOperation(theResultJson.getOperation());
- result.setMessage(theResultJson.getMessage());
- result.setSuccess(theResultJson.getSuccess());
- return result;
- }
-
- private Batch2JobInfo fromJobInstanceToBatch2JobInfo(@Nonnull JobInstance theInstance) {
- Batch2JobInfo info = new Batch2JobInfo();
- info.setJobId(theInstance.getInstanceId());
- // should convert this to a more generic enum for all batch2 (which is what it seems like)
- // or use the status enum only (combine with bulk export enum)
- // on the Batch2JobInfo
- info.setStatus(BulkExportUtil.fromBatchStatus(theInstance.getStatus()));
- info.setCancelled(theInstance.isCancelled());
- info.setStartTime(theInstance.getStartTime());
- info.setEndTime(theInstance.getEndTime());
- info.setReport(theInstance.getReport());
- info.setErrorMsg(theInstance.getErrorMessage());
- info.setCombinedRecordsProcessed(theInstance.getCombinedRecordsProcessed());
- if ( Batch2JobDefinitionConstants.BULK_EXPORT.equals(theInstance.getJobDefinitionId())) {
- BulkExportJobParameters parameters = theInstance.getParameters(BulkExportJobParameters.class);
- info.setRequestPartitionId(parameters.getPartitionId());
- }
-
- return info;
- }
-
- private Batch2JobStartResponse startBatch2BulkExportJob(RequestDetails theRequestDetails, BulkExportParameters theParameters) {
- JobInstanceStartRequest request = createStartRequest(theParameters);
- BulkExportJobParameters parameters = BulkExportJobParameters.createFromExportJobParameters(theParameters);
- request.setParameters(parameters);
- return myJobCoordinator.startInstance(theRequestDetails, request);
- }
-
- private JobInstanceStartRequest createStartRequest(Batch2BaseJobParameters theParameters) {
- JobInstanceStartRequest request = new JobInstanceStartRequest();
- request.setJobDefinitionId(theParameters.getJobDefinitionId());
- request.setUseCache(theParameters.isUseExistingJobsFirst());
- return request;
- }
-}
diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java
index f1bc7089edc..cc8b8dc2758 100644
--- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java
+++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java
@@ -1,10 +1,9 @@
package ca.uhn.fhir.batch2.jobs.export;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
@@ -39,7 +38,7 @@ public class BulkExportJobParametersValidatorTest {
BulkExportJobParameters parameters = new BulkExportJobParameters();
parameters.setResourceTypes(Arrays.asList("Patient", "Observation"));
parameters.setOutputFormat(Constants.CT_FHIR_NDJSON);
- parameters.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+ parameters.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
return parameters;
}
@@ -96,7 +95,7 @@ public class BulkExportJobParametersValidatorTest {
public void validate_validParametersForPatient_returnsEmptyList() {
// setup
BulkExportJobParameters parameters = createSystemExportParameters();
- parameters.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ parameters.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
// when
when(myDaoRegistry.isResourceTypeSupported(anyString()))
@@ -115,7 +114,7 @@ public class BulkExportJobParametersValidatorTest {
// setup
String resourceType = "notValid";
BulkExportJobParameters parameters = createSystemExportParameters();
- parameters.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+ parameters.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
parameters.setResourceTypes(Collections.singletonList(resourceType));
// test
@@ -131,7 +130,7 @@ public class BulkExportJobParametersValidatorTest {
public void validate_validateParametersForGroup_returnsEmptyList() {
// setup
BulkExportJobParameters parameters = createSystemExportParameters();
- parameters.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ parameters.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
parameters.setGroupId("groupId");
parameters.setExpandMdm(true);
@@ -151,7 +150,7 @@ public class BulkExportJobParametersValidatorTest {
public void validate_groupParametersWithoutGroupId_returnsError() {
// setup
BulkExportJobParameters parameters = createSystemExportParameters();
- parameters.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+ parameters.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
// test
List result = myValidator.validate(null, parameters);
diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStepTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStepTest.java
index 0fccbe875eb..194a694d934 100644
--- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStepTest.java
+++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStepTest.java
@@ -4,7 +4,8 @@ package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.interceptor.executor.InterceptorService;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.jobs.models.BatchResourceId;
@@ -21,7 +22,6 @@ import ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;
@@ -67,6 +67,9 @@ public class ExpandResourcesStepTest {
@Mock
IIdHelperService myIdHelperService;
+ @Spy
+ private InterceptorService myInterceptorService = new InterceptorService();
+
@Spy
private FhirContext myFhirContext = FhirContext.forR4Cached();
@@ -82,7 +85,7 @@ public class ExpandResourcesStepTest {
private BulkExportJobParameters createParameters(boolean thePartitioned) {
BulkExportJobParameters parameters = new BulkExportJobParameters();
parameters.setResourceTypes(Arrays.asList("Patient", "Observation"));
- parameters.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ parameters.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
parameters.setOutputFormat("json");
parameters.setSince(new Date());
if (thePartitioned) {
diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStepTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStepTest.java
index 33408c335e0..fdf4bc2f69b 100644
--- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStepTest.java
+++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStepTest.java
@@ -4,7 +4,7 @@ import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.api.VoidModel;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.jobs.models.BatchResourceId;
import ca.uhn.fhir.batch2.model.JobInstance;
@@ -13,7 +13,6 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor;
import ca.uhn.fhir.jpa.bulk.export.model.ExportPIDIteratorParameters;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
@@ -77,7 +76,7 @@ public class FetchResourceIdsStepTest {
BulkExportJobParameters jobParameters = new BulkExportJobParameters();
jobParameters.setSince(new Date());
jobParameters.setOutputFormat("json");
- jobParameters.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+ jobParameters.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
jobParameters.setResourceTypes(Arrays.asList("Patient", "Observation"));
if (thePartitioned) {
jobParameters.setPartitionId(RequestPartitionId.fromPartitionName("Partition-A"));
diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStepTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStepTest.java
index d25126ac84a..5d697002cb8 100644
--- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStepTest.java
+++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStepTest.java
@@ -6,7 +6,7 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.context.FhirContext;
diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImplTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImplTest.java
deleted file mode 100644
index c9749a38167..00000000000
--- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/services/Batch2JobRunnerImplTest.java
+++ /dev/null
@@ -1,198 +0,0 @@
-package ca.uhn.fhir.batch2.jobs.services;
-
-import ca.uhn.fhir.batch2.api.IJobCoordinator;
-import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
-import ca.uhn.fhir.batch2.model.JobInstance;
-import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
-import ca.uhn.fhir.batch2.model.StatusEnum;
-import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
-import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
-import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
-import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
-import ch.qos.logback.classic.Level;
-import ch.qos.logback.classic.Logger;
-import ch.qos.logback.classic.spi.ILoggingEvent;
-import ch.qos.logback.core.read.ListAppender;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.ArgumentCaptor;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.junit.jupiter.MockitoExtension;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collections;
-import java.util.Date;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.ArgumentMatchers.isNotNull;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-@ExtendWith(MockitoExtension.class)
-public class Batch2JobRunnerImplTest {
-
- private static final Logger ourLog = (Logger) LoggerFactory.getLogger(IBatch2JobRunner.class);
-
- @Mock
- private ListAppender myAppender;
-
- @Mock
- private IJobCoordinator myJobCoordinator;
-
- @InjectMocks
- private Batch2JobRunnerImpl myJobRunner;
-
- @BeforeEach
- public void init() {
- ourLog.addAppender(myAppender);
- }
-
- @AfterEach
- public void end() {
- ourLog.detachAppender(myAppender);
- }
-
- @Test
- public void startJob_invalidJobDefinitionId_logsAndDoesNothing() {
- // setup
- String jobId = "invalid";
- ourLog.setLevel(Level.ERROR);
-
- // test
- myJobRunner.startNewJob(new SystemRequestDetails(), new Batch2BaseJobParameters(jobId));
-
- // verify
- ArgumentCaptor captor = ArgumentCaptor.forClass(ILoggingEvent.class);
- verify(myAppender).doAppend(captor.capture());
- assertTrue(captor.getValue().getMessage()
- .contains("Invalid JobDefinitionId " + jobId));
- verify(myJobCoordinator, never())
- .startInstance(any(JobInstanceStartRequest.class));
- }
-
- @Test
- public void startJob_invalidParametersForExport_logsAndDoesNothing() {
- // setup
- ourLog.setLevel(Level.ERROR);
-
- // test
- myJobRunner.startNewJob(new SystemRequestDetails(), new Batch2BaseJobParameters(Batch2JobDefinitionConstants.BULK_EXPORT));
-
- // verify
- ArgumentCaptor captor = ArgumentCaptor.forClass(ILoggingEvent.class);
- verify(myAppender).doAppend(captor.capture());
- String msg = captor.getValue().getMessage();
- String expectedMsg = "Invalid parameters for " + Batch2JobDefinitionConstants.BULK_EXPORT;
- assertTrue(msg
- .contains(expectedMsg),
- msg + " != " + expectedMsg);
- verify(myJobCoordinator, never())
- .startInstance(any(JobInstanceStartRequest.class));
- }
-
- @Test
- public void startJob_bulkExport_callsAsExpected() {
- // setup
- BulkExportParameters parameters = new BulkExportParameters(Batch2JobDefinitionConstants.BULK_EXPORT);
- parameters.setResourceTypes(Collections.singletonList("Patient"));
-
- // when
- String jobInstanceId = "test_job_instance";
- Date end = new Date();
- Date start = new Date(end.getTime()-100);
- JobInstance mockJobInstance = createMockJobInstance(parameters, jobInstanceId, start, end);
- when(myJobCoordinator.getInstance(eq(jobInstanceId))).thenReturn(mockJobInstance);
-
- // test
- myJobRunner.startNewJob(new SystemRequestDetails(), parameters);
-
- // verify
- ArgumentCaptor captor = ArgumentCaptor.forClass(JobInstanceStartRequest.class);
- verify(myJobCoordinator)
- .startInstance(isNotNull(), captor.capture());
- JobInstanceStartRequest val = captor.getValue();
- // we need to verify something in the parameters
- ourLog.info(val.getParameters());
- assertTrue(val.getParameters().contains("Patient"));
- assertFalse(val.getParameters().contains("allPartitions"));
- assertFalse(val.getParameters().contains("Partition-A"));
-
- Batch2JobInfo jobInfo = myJobRunner.getJobInfo(jobInstanceId);
- verifyBatch2JobInfo(jobInfo, jobInstanceId, start, end, null);
- }
-
- private JobInstance createMockJobInstance(BulkExportParameters theParameters, String theJobInstanceId, Date start, Date end) {
- JobInstance mockJobInstance = new JobInstance();
- mockJobInstance.setInstanceId(theJobInstanceId);
- mockJobInstance.setStatus(StatusEnum.COMPLETED);
- mockJobInstance.setCancelled(false);
- mockJobInstance.setStartTime(start);
- mockJobInstance.setEndTime(end);
- mockJobInstance.setReport("test report");
- mockJobInstance.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
- mockJobInstance.setParameters(BulkExportJobParameters.createFromExportJobParameters(theParameters));
- return mockJobInstance;
- }
-
- private void verifyBatch2JobInfo(Batch2JobInfo jobInfo, String theJobId, Date start, Date end, RequestPartitionId partitionId) {
- assertEquals(jobInfo.getJobId(), theJobId );
- assertFalse(jobInfo.isCancelled());
- assertEquals(jobInfo.getStartTime(), start);
- assertEquals(jobInfo.getEndTime(), end);
- assertEquals(jobInfo.getReport(), "test report");
- assertEquals(jobInfo.getStatus(), BulkExportJobStatusEnum.COMPLETE);
- if (partitionId != null) {
- assertEquals(jobInfo.getRequestPartitionId(), partitionId);
- } else {
- assertNull(jobInfo.getRequestPartitionId());
- }
- }
-
- @Test
- public void startJob_bulkExport_partitioned() {
- // setup
- BulkExportParameters parameters = new BulkExportParameters(Batch2JobDefinitionConstants.BULK_EXPORT);
- parameters.setResourceTypes(Collections.singletonList("Patient"));
- RequestPartitionId partitionId = RequestPartitionId.fromPartitionName("Partition-A");
- parameters.setPartitionId(partitionId);
-
- // when
- String jobInstanceId = "test_job_instance";
- Date end = new Date();
- Date start = new Date(end.getTime()-100);
- JobInstance mockJobInstance = createMockJobInstance(parameters, jobInstanceId, start, end);
- when(myJobCoordinator.getInstance(eq(jobInstanceId))).thenReturn(mockJobInstance);
-
- // test
- myJobRunner.startNewJob(new SystemRequestDetails(), parameters);
-
- // verify
- ArgumentCaptor captor = ArgumentCaptor.forClass(JobInstanceStartRequest.class);
- verify(myJobCoordinator)
- .startInstance(isNotNull(), captor.capture());
- JobInstanceStartRequest val = captor.getValue();
- // we need to verify something in the parameters
- ourLog.info(val.getParameters());
- assertTrue(val.getParameters().contains("Patient"));
- assertTrue(val.getParameters().contains("Partition-A"));
- assertTrue(val.getParameters().contains("\"allPartitions\":false"));
-
- Batch2JobInfo jobInfo = myJobRunner.getJobInfo(jobInstanceId);
- verifyBatch2JobInfo(jobInfo, jobInstanceId, start, end, partitionId);
-
- }
-
-}
diff --git a/hapi-fhir-storage-batch2-test-utilities/pom.xml b/hapi-fhir-storage-batch2-test-utilities/pom.xml
index 503e5115921..7dd205ad68d 100644
--- a/hapi-fhir-storage-batch2-test-utilities/pom.xml
+++ b/hapi-fhir-storage-batch2-test-utilities/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml
index 83c24b0fd38..8ae1a897b88 100644
--- a/hapi-fhir-storage-batch2/pom.xml
+++ b/hapi-fhir-storage-batch2/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCoordinator.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCoordinator.java
index 1a4dc066b2c..a9246b75f90 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCoordinator.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCoordinator.java
@@ -29,6 +29,7 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.springframework.data.domain.Page;
+import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Set;
@@ -67,6 +68,7 @@ public interface IJobCoordinator {
* @return Returns the current instance details
* @throws ResourceNotFoundException If the instance ID can not be found
*/
+ @Nonnull
JobInstance getInstance(String theInstanceId) throws ResourceNotFoundException;
/**
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImpl.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImpl.java
index b0b47e5d9b8..3909c70ce0f 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImpl.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImpl.java
@@ -96,6 +96,8 @@ public class JobCoordinatorImpl implements IJobCoordinator {
if (isBlank(paramsString)) {
throw new InvalidRequestException(Msg.code(2065) + "No parameters supplied");
}
+ Validate.notBlank(theStartRequest.getJobDefinitionId(), "No job definition ID supplied in start request");
+
// if cache - use that first
if (theStartRequest.isUseCache()) {
FetchJobInstancesRequest request = new FetchJobInstancesRequest(theStartRequest.getJobDefinitionId(), theStartRequest.getParameters(), getStatesThatTriggerCache());
@@ -142,6 +144,7 @@ public class JobCoordinatorImpl implements IJobCoordinator {
}
@Override
+ @Nonnull
public JobInstance getInstance(String theInstanceId) {
return myJobQuerySvc.fetchInstance(theInstanceId);
}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobQuerySvc.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobQuerySvc.java
index 617ae373d2a..58f8b065573 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobQuerySvc.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobQuerySvc.java
@@ -53,6 +53,7 @@ class JobQuerySvc {
myJobDefinitionRegistry = theJobDefinitionRegistry;
}
+ @Nonnull
JobInstance fetchInstance(String theInstanceId) {
return myJobPersistence.fetchInstance(theInstanceId)
.map(this::massageInstanceForUserAccess)
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ChunkRangeJson.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ChunkRangeJson.java
index 67dbe566459..c2b24b07497 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ChunkRangeJson.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ChunkRangeJson.java
@@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.batch2.jobs.chunk;
-import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
-import ca.uhn.fhir.jpa.util.JsonDateSerializer;
+import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
+import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java
index e339cea80e1..9b7d8207210 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java
@@ -20,8 +20,8 @@
package ca.uhn.fhir.batch2.model;
import ca.uhn.fhir.batch2.api.IJobInstance;
-import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
-import ca.uhn.fhir.jpa.util.JsonDateSerializer;
+import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
+import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.Logs;
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstanceStartRequest.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstanceStartRequest.java
index 6c8427b749f..51d1f66e663 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstanceStartRequest.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstanceStartRequest.java
@@ -20,6 +20,7 @@
package ca.uhn.fhir.batch2.model;
import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.util.JsonUtil;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -56,6 +57,16 @@ public class JobInstanceStartRequest implements IModelJson {
setParameters(theJobInstance.getParameters());
}
+ /**
+ * Constructor
+ *
+ * @since 6.8.0
+ */
+ public JobInstanceStartRequest(String theJobDefinitionId, IModelJson theParameters) {
+ setJobDefinitionId(theJobDefinitionId);
+ setParameters(theParameters);
+ }
+
public String getJobDefinitionId() {
return myJobDefinitionId;
}
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunk.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunk.java
index 0d888ad1fb6..5c26d391976 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunk.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunk.java
@@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.batch2.model;
-import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
-import ca.uhn.fhir.jpa.util.JsonDateSerializer;
+import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
+import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.util.JsonUtil;
import com.fasterxml.jackson.annotation.JsonProperty;
diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml
index b17fa0af2e7..06af52b822d 100644
--- a/hapi-fhir-storage-cr/pom.xml
+++ b/hapi-fhir-storage-cr/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml
index f5ff33599ec..91e5fafe815 100644
--- a/hapi-fhir-storage-mdm/pom.xml
+++ b/hapi-fhir-storage-mdm/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml
index fcffe2a6823..489a3b70706 100644
--- a/hapi-fhir-storage-test-utilities/pom.xml
+++ b/hapi-fhir-storage-test-utilities/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml
index 153fdd00580..321ccc0061a 100644
--- a/hapi-fhir-storage/pom.xml
+++ b/hapi-fhir-storage/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/Batch2JobInfo.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/Batch2JobInfo.java
deleted file mode 100644
index 945e24c9564..00000000000
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/Batch2JobInfo.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR Storage api
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.jpa.api.model;
-
-import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
-
-import java.util.Date;
-
-public class Batch2JobInfo {
-
- // job instance id
- private String myJobId;
-
- // job status - see StatusEnum
- private BulkExportJobStatusEnum myStatus;
-
- // cancelled boolean
- private boolean myCancelled;
-
- // start time
- private Date myStartTime;
-
- // end time
- private Date myEndTime;
-
- // error message
- private String myErrorMsg;
-
- // the output report (stringified json of whatever the reduction step outputs)
- private String myReport;
-
- private RequestPartitionId myRequestPartitionId;
- private Integer myCombinedRecordsProcessed;
-
- public String getJobId() {
- return myJobId;
- }
-
- public void setJobId(String theJobId) {
- myJobId = theJobId;
- }
-
- public BulkExportJobStatusEnum getStatus() {
- return myStatus;
- }
-
- public void setStatus(BulkExportJobStatusEnum theStatus) {
- myStatus = theStatus;
- }
-
- public boolean isCancelled() {
- return myCancelled;
- }
-
- public void setCancelled(boolean theCancelled) {
- myCancelled = theCancelled;
- }
-
- public Date getStartTime() {
- return myStartTime;
- }
-
- public void setStartTime(Date theStartTime) {
- myStartTime = theStartTime;
- }
-
- public Date getEndTime() {
- return myEndTime;
- }
-
- public void setEndTime(Date theEndTime) {
- myEndTime = theEndTime;
- }
-
- public String getReport() {
- return myReport;
- }
-
- public void setReport(String theReport) {
- myReport = theReport;
- }
-
- public String getErrorMsg() {
- return myErrorMsg;
- }
-
- public void setErrorMsg(String theErrorMsg) {
- myErrorMsg = theErrorMsg;
- }
-
- public RequestPartitionId getRequestPartitionId() {
- return myRequestPartitionId;
- }
-
- public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) {
- myRequestPartitionId = theRequestPartitionId;
- }
-
- public Integer getCombinedRecordsProcessed() {
- return myCombinedRecordsProcessed;
- }
-
- public void setCombinedRecordsProcessed(Integer theCombinedRecordsProcessed) {
- myCombinedRecordsProcessed = theCombinedRecordsProcessed;
- }
-
-}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportParameters.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportParameters.java
deleted file mode 100644
index 75534ddf094..00000000000
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportParameters.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR Storage api
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.jpa.api.model;
-
-import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
-
-import javax.annotation.Nonnull;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-
-public class BulkExportParameters extends Batch2BaseJobParameters {
-
- /**
- * List of resource types to export.
- */
- private List myResourceTypes;
-
- /**
- * The earliest date from which to export resources.
- */
- private Date mySince;
-
- /**
- * Filters are used to narrow down the resources to export.
- * Eg:
- * Patient/123?group=a
- * "group=a" is a filter
- */
- private List myFilters;
-
- /**
- * URLs to be applied by the inMemoryMatcher after the SQL select
- */
- private List myPostFetchFilterUrls;
-
- /**
- * Export style - Patient, Group or Everything
- */
- private BulkDataExportOptions.ExportStyle myExportStyle;
-
- /**
- * Group id
- */
- private String myGroupId;
-
- /**
- * Output format.
- * Currently unsupported (all outputs are ndjson)
- */
- private String myOutputFormat;
-
- /**
- * For group export;
- * whether or not to expand mdm
- */
- private boolean myExpandMdm;
-
-
- /**
- * Patient id(s)
- */
- private List myPatientIds;
-
- /**
- * The request which originated the request.
- */
- private String myOriginalRequestUrl;
- private String myExportIdentifier;
-
-
-
- /**
- * The partition for the request if applicable.
- */
- private RequestPartitionId myPartitionId;
-
- public boolean isExpandMdm() {
- return myExpandMdm;
- }
-
- public void setExpandMdm(boolean theExpandMdm) {
- myExpandMdm = theExpandMdm;
- }
-
- public BulkExportParameters(@Nonnull String theJobDefinitionId) {
- super(theJobDefinitionId);
- }
-
- public List getResourceTypes() {
- return myResourceTypes;
- }
-
- public void setExportIdentifier(String theExportIdentifier) {
- myExportIdentifier = theExportIdentifier;
- }
- public String getExportIdentifier() {
- return myExportIdentifier;
- }
-
- public void setResourceTypes(List theResourceTypes) {
- myResourceTypes = theResourceTypes;
- }
-
- public Date getSince() {
- return mySince;
- }
-
- public void setSince(Date theSince) {
- mySince = theSince;
- }
-
- public List getFilters() {
- if (myFilters == null) {
- myFilters = new ArrayList<>();
- }
- return myFilters;
- }
-
- public void setFilters(List theFilters) {
- myFilters = theFilters;
- }
-
- public List getPostFetchFilterUrls() {
- if (myPostFetchFilterUrls == null) {
- myPostFetchFilterUrls = new ArrayList<>();
- }
- return myPostFetchFilterUrls;
- }
-
- public void setPostFetchFilterUrls(List thePostFetchFilterUrls) {
- myPostFetchFilterUrls = thePostFetchFilterUrls;
- }
-
- public BulkDataExportOptions.ExportStyle getExportStyle() {
- return myExportStyle;
- }
-
- public void setExportStyle(BulkDataExportOptions.ExportStyle theExportStyle) {
- myExportStyle = theExportStyle;
- }
-
- public String getGroupId() {
- return myGroupId;
- }
-
- public void setGroupId(String theGroupId) {
- myGroupId = theGroupId;
- }
-
- public String getOutputFormat() {
- return myOutputFormat;
- }
-
- public void setOutputFormat(String theOutputFormat) {
- myOutputFormat = theOutputFormat;
- }
-
- public List getPatientIds() {
- return myPatientIds;
- }
-
- public void setPatientIds(List thePatientIds) {
- myPatientIds = thePatientIds;
- }
-
- public String getOriginalRequestUrl() {
- return myOriginalRequestUrl;
- }
-
- public void setOriginalRequestUrl(String theOriginalRequestUrl) {
- myOriginalRequestUrl = theOriginalRequestUrl;
- }
-
- public RequestPartitionId getPartitionId() {
- return myPartitionId;
- }
-
- public void setPartitionId(RequestPartitionId thePartitionId) {
- myPartitionId = thePartitionId;
- }
-}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2JobRunner.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2JobRunner.java
deleted file mode 100644
index 900b852abc5..00000000000
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2JobRunner.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR Storage api
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.jpa.api.svc;
-
-import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
-import ca.uhn.fhir.jpa.api.model.Batch2JobOperationResult;
-import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
-import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
-import ca.uhn.fhir.rest.api.server.RequestDetails;
-
-public interface IBatch2JobRunner {
-
- /**
- * Start the job with the given parameters
- * @param theParameters
- * @return returns the job id
- */
- Batch2JobStartResponse startNewJob(RequestDetails theRequestDetails, Batch2BaseJobParameters theParameters);
-
- /**
- * Returns information about a provided job.
- *
- * @param theJobId - the job id
- * @return - the batch2 job info
- */
- Batch2JobInfo getJobInfo(String theJobId);
-
- /**
- * Cancels the job provided
- */
- Batch2JobOperationResult cancelInstance(String theJobId);
-}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java
index 7a082434d04..e760237d5cf 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java
@@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.jpa.binary.api;
-import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
-import ca.uhn.fhir.jpa.util.JsonDateSerializer;
+import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
+import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportJobStatusEnum.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportJobStatusEnum.java
deleted file mode 100644
index d5c7a4375bd..00000000000
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportJobStatusEnum.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR Storage api
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.jpa.bulk.export.model;
-
-import com.fasterxml.jackson.annotation.JsonFormat;
-
-/**
- * This may be in a "jpa" package, but it is not a jpa model!
- */
-@JsonFormat(shape = JsonFormat.Shape.STRING)
-public enum BulkExportJobStatusEnum {
-
- /**
- * Sorting OK!
- */
-
- SUBMITTED,
- BUILDING,
- COMPLETE,
- ERROR;
-
-}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java
index e22fee79be7..edb6022cb67 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java
@@ -20,8 +20,8 @@
package ca.uhn.fhir.jpa.bulk.export.model;
-import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
-import ca.uhn.fhir.jpa.util.JsonDateSerializer;
+import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
+import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonInclude;
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/ExportPIDIteratorParameters.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/ExportPIDIteratorParameters.java
index 20a2335384d..a797086fca9 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/ExportPIDIteratorParameters.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/ExportPIDIteratorParameters.java
@@ -20,7 +20,7 @@
package ca.uhn.fhir.jpa.bulk.export.model;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
@@ -59,7 +59,7 @@ public class ExportPIDIteratorParameters {
/**
* The export style
*/
- private BulkDataExportOptions.ExportStyle myExportStyle;
+ private BulkExportJobParameters.ExportStyle myExportStyle;
/**
* the group id
*/
@@ -123,11 +123,11 @@ public class ExportPIDIteratorParameters {
myInstanceId = theInstanceId;
}
- public BulkDataExportOptions.ExportStyle getExportStyle() {
+ public BulkExportJobParameters.ExportStyle getExportStyle() {
return myExportStyle;
}
- public void setExportStyle(BulkDataExportOptions.ExportStyle theExportStyle) {
+ public void setExportStyle(BulkExportJobParameters.ExportStyle theExportStyle) {
myExportStyle = theExportStyle;
}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/JobInfo.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/JobInfo.java
deleted file mode 100644
index bd6d51f6e50..00000000000
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/JobInfo.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR Storage api
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.jpa.bulk.export.provider;
-
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
-
-public class JobInfo {
- /**
- * Legacy - but this jobId is not the job id
- * but the actual id of the record storing metadata of the job
- */
- private String myJobMetadataId;
- private BulkExportJobStatusEnum myStatus;
-
- public String getJobMetadataId() {
- return myJobMetadataId;
- }
-
- public JobInfo setJobMetadataId(String theJobId) {
- myJobMetadataId = theJobId;
- return this;
- }
-
- public BulkExportJobStatusEnum getStatus() {
- return myStatus;
- }
-
- public JobInfo setStatus(BulkExportJobStatusEnum theStatus) {
- myStatus = theStatus;
- return this;
- }
-}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BulkExportUtils.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BulkExportUtils.java
deleted file mode 100644
index f6280b7efb5..00000000000
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BulkExportUtils.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR Storage api
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.jpa.util;
-
-import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
-import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
-import org.apache.commons.collections4.CollectionUtils;
-import org.hl7.fhir.instance.model.api.IIdType;
-
-import java.util.ArrayList;
-import java.util.stream.Collectors;
-
-public class BulkExportUtils {
- private BulkExportUtils() {}
-
- /**
- * Converts the BulkDataExportOptions -> BulkExportParameters
- */
- public static BulkExportParameters createBulkExportJobParametersFromExportOptions(BulkDataExportOptions theOptions) {
- BulkExportParameters parameters = new BulkExportParameters(Batch2JobDefinitionConstants.BULK_EXPORT);
-
- parameters.setSince(theOptions.getSince());
- parameters.setOutputFormat(theOptions.getOutputFormat());
- parameters.setExportStyle(theOptions.getExportStyle());
- parameters.setFilters(new ArrayList<>(theOptions.getFilters()));
- parameters.setPostFetchFilterUrls(new ArrayList<>(theOptions.getPostFetchFilterUrls()));
- if (theOptions.getGroupId() != null) {
- parameters.setGroupId(theOptions.getGroupId().getValue());
- }
- if (CollectionUtils.isNotEmpty(theOptions.getResourceTypes())) {
- parameters.setResourceTypes(new ArrayList<>(theOptions.getResourceTypes()));
- }
- if (CollectionUtils.isNotEmpty(theOptions.getPatientIds())) {
- parameters.setPatientIds(theOptions.getPatientIds().stream().map(IIdType::getValue).collect(Collectors.toList()));
- }
- parameters.setExpandMdm(theOptions.isExpandMdm());
- parameters.setUseExistingJobsFirst(true);
- parameters.setExportIdentifier(theOptions.getExportIdentifier());
-
- return parameters;
- }
-}
diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml
index 42d2fd19c18..fccc47c2a8d 100644
--- a/hapi-fhir-structures-dstu2.1/pom.xml
+++ b/hapi-fhir-structures-dstu2.1/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml
index 4bbf21be756..a2fccc49335 100644
--- a/hapi-fhir-structures-dstu2/pom.xml
+++ b/hapi-fhir-structures-dstu2/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml
index 88e3aad18fc..845e166aa1d 100644
--- a/hapi-fhir-structures-dstu3/pom.xml
+++ b/hapi-fhir-structures-dstu3/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml
index 16dff2036ae..99fb23c507d 100644
--- a/hapi-fhir-structures-hl7org-dstu2/pom.xml
+++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml
index 1c000cd8da1..cfbb2444b3b 100644
--- a/hapi-fhir-structures-r4/pom.xml
+++ b/hapi-fhir-structures-r4/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml
index 52932128517..17c2cf99e70 100644
--- a/hapi-fhir-structures-r4b/pom.xml
+++ b/hapi-fhir-structures-r4b/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml
index c1edd3fad2f..096e5e22334 100644
--- a/hapi-fhir-structures-r5/pom.xml
+++ b/hapi-fhir-structures-r5/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml
index df9d06e6fab..9076329e7b2 100644
--- a/hapi-fhir-test-utilities/pom.xml
+++ b/hapi-fhir-test-utilities/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRuleTestUtil.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRuleTestUtil.java
index de3dd9ea9a4..600efab4949 100644
--- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRuleTestUtil.java
+++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRuleTestUtil.java
@@ -19,7 +19,7 @@
*/
package ca.uhn.fhir.rest.server.interceptor.auth;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.HashSet;
@@ -59,7 +59,7 @@ public final class OperationRuleTestUtil {
return ((RuleBulkExportImpl)theRule).getGroupId();
}
- public static BulkDataExportOptions.ExportStyle getWantExportStyle(IAuthRule theRule) {
+ public static BulkExportJobParameters.ExportStyle getWantExportStyle(IAuthRule theRule) {
return ((RuleBulkExportImpl)theRule).getWantExportStyle();
}
}
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java
index 82a9c4ecd41..68d5a8c2a2b 100644
--- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java
+++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java
@@ -80,6 +80,13 @@ public interface ITestDataBuilder {
return t -> __setPrimitiveChild(getFhirContext(), t, "active", "boolean", "false");
}
+ /**
+ * Set Patient.gender
+ */
+ default ICreationArgument withGender(String theGender) {
+ return t -> __setPrimitiveChild(getFhirContext(), t, "gender", "code", theGender);
+ }
+
default ICreationArgument withFamily(String theFamily) {
return t -> {
IPrimitiveType> family = (IPrimitiveType>) getFhirContext().getElementDefinition("string").newInstance();
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/server/BaseJettyServerExtension.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/server/BaseJettyServerExtension.java
index 214eb244804..5087b7e74e3 100644
--- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/server/BaseJettyServerExtension.java
+++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/server/BaseJettyServerExtension.java
@@ -72,6 +72,7 @@ public abstract class BaseJettyServerExtension> myRequestHeaders = new ArrayList<>();
private final List myRequestContentTypes = new ArrayList<>();
+ private final List myServletFilters = new ArrayList<>();
private String myServletPath = "/*";
private Server myServer;
private CloseableHttpClient myHttpClient;
@@ -102,6 +103,13 @@ public abstract class BaseJettyServerExtension
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml
index 54920d28b6f..a61fa01a48b 100644
--- a/hapi-fhir-validation-resources-dstu2.1/pom.xml
+++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml
index 485aee4823e..57f4daf36cf 100644
--- a/hapi-fhir-validation-resources-dstu2/pom.xml
+++ b/hapi-fhir-validation-resources-dstu2/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml
index 17ce6a5eac8..58ac108727c 100644
--- a/hapi-fhir-validation-resources-dstu3/pom.xml
+++ b/hapi-fhir-validation-resources-dstu3/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml
index 2bfd504a643..a4a2988a161 100644
--- a/hapi-fhir-validation-resources-r4/pom.xml
+++ b/hapi-fhir-validation-resources-r4/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-r4b/pom.xml b/hapi-fhir-validation-resources-r4b/pom.xml
index 9ee5d0c9eb3..ffb780fa9d6 100644
--- a/hapi-fhir-validation-resources-r4b/pom.xml
+++ b/hapi-fhir-validation-resources-r4b/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml
index 2bc03354607..8a36a9652f4 100644
--- a/hapi-fhir-validation-resources-r5/pom.xml
+++ b/hapi-fhir-validation-resources-r5/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml
index 65b7b1e2635..220d8ff3195 100644
--- a/hapi-fhir-validation/pom.xml
+++ b/hapi-fhir-validation/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml
index 76ff6964f92..f0299d9fbb9 100644
--- a/hapi-tinder-plugin/pom.xml
+++ b/hapi-tinder-plugin/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml
index 092511989f5..9059b10857a 100644
--- a/hapi-tinder-test/pom.xml
+++ b/hapi-tinder-test/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../pom.xml
diff --git a/pom.xml b/pom.xml
index 6b7ed5e6ff3..97dd1da7c06 100644
--- a/pom.xml
+++ b/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-fhir
pom
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
HAPI-FHIR
An open-source implementation of the FHIR specification in Java.
diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
index fa87c24e745..6e03ace49e4 100644
--- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
+++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../../pom.xml
diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml
index f0c3e3c1adf..aa4e15d0064 100644
--- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml
+++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../../pom.xml
diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml
index b11626f0376..6d3fc67bcdd 100644
--- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml
+++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.7.10-SNAPSHOT
+ 6.7.11-SNAPSHOT
../../pom.xml