diff --git a/.editorconfig b/.editorconfig index f19de7e2a01..479bb985c23 100644 --- a/.editorconfig +++ b/.editorconfig @@ -31,6 +31,7 @@ charset = utf-8 indent_style = tab tab_width = 3 indent_size = 3 +continuation_indent_size=3 ij_java_align_consecutive_assignments = false ij_java_align_consecutive_variable_declarations = false ij_java_align_group_field_declarations = false diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index 0bee4719670..2925cddee83 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index 9eb400b46c0..60f23bd7bcc 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index ac1fd654c29..00ce73745e2 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java index ca79ffb009b..7e3c568663e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java @@ -156,7 +156,8 @@ public class BundleBuilder { // Bundle.entry.request.url IPrimitiveType url = (IPrimitiveType) myContext.getElementDefinition("uri").newInstance(); - url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().getValue()); + String resourceType = myContext.getResourceType(theResource); + 
url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().withResourceType(resourceType).getValue()); myEntryRequestUrlChild.getMutator().setValue(request, url); // Bundle.entry.request.url diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties index 26c705f6f74..e1e02ff0e12 100644 --- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties +++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties @@ -68,8 +68,8 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri # JPA Messages -ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export -ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0} +ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export +ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0} ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request. ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index. ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. 
This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index 6d6a435eddf..d3c726265b4 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -3,14 +3,14 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT pom HAPI FHIR BOM ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index b9a5e93b0f6..2816ae6159e 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index d7306fba4a9..7d9dc89fe6c 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml index 4d1fd59e786..1055a8ed4ea 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../hapi-deployable-pom diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index ac83c296578..8784078d56b 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index 78ec60b5d68..cef760eeff8 100644 --- 
a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 0783c0ea778..e7ac14275c6 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index ce5eca39f05..2ec2f22795c 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index 837b2992aee..24b559ea001 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index 58d067d2797..40519292d48 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -78,13 +78,13 @@ ca.uhn.hapi.fhir hapi-fhir-structures-dstu2 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT compile ca.uhn.hapi.fhir hapi-fhir-jpaserver-subscription - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT compile @@ -101,7 +101,7 @@ ca.uhn.hapi.fhir hapi-fhir-testpage-overlay - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT classes diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml new file mode 100644 index 00000000000..c973340bfd7 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/changes.yaml 
@@ -0,0 +1,25 @@ +--- +- item: + type: "add" + title: "The version of a few dependencies have been bumped to the latest versions +(dependent HAPI modules listed in brackets): + +" diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index bddf4b9e81b..43dd8b85ed7 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index 4d6434eab74..073ecc3dbf0 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-example/pom.xml b/hapi-fhir-jaxrsserver-example/pom.xml index 627009d073a..cce18f1d9c1 100644 --- a/hapi-fhir-jaxrsserver-example/pom.xml +++ b/hapi-fhir-jaxrsserver-example/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-jpaserver-api/pom.xml b/hapi-fhir-jpaserver-api/pom.xml index a1b87034a93..ac534aaccba 100644 --- a/hapi-fhir-jpaserver-api/pom.xml +++ b/hapi-fhir-jpaserver-api/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java index 2d810788d76..00b2ffb8027 100644 --- a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java +++ b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java @@ -67,6 +67,18 @@ public interface IFhirSystemDao extends IDao { */ IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage); + /** + 
* Executes a FHIR transaction using a new database transaction. This method must + * not be called from within a DB transaction. + */ T transaction(RequestDetails theRequestDetails, T theResources); + /** + * Executes a FHIR transaction nested inside the current database transaction. + * This form of the transaction processor can handle write operations only (no reads) + */ + default T transactionNested(RequestDetails theRequestDetails, T theResources) { + throw new UnsupportedOperationException(); + } + } diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 7b0116aad18..eedfef43910 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java index 178ee7358d5..fc4c03c7b34 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java @@ -20,17 +20,20 @@ package ca.uhn.fhir.jpa.batch; * #L% */ -import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @Configuration //When you define a new batch job, add it here. 
@Import({ - CommonBatchJobConfig.class, - BulkExportJobConfig.class + CommonBatchJobConfig.class, + BulkExportJobConfig.class, + BulkImportJobConfig.class }) public class BatchJobsConfig { + public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob"; public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob"; public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob"; public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob"; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java index 4566aa83af3..46c418a1e74 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java @@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.fhirpath.IFhirPath; import ca.uhn.fhir.jpa.batch.log.Logs; -import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc; import ca.uhn.fhir.util.ExtensionUtil; import ca.uhn.fhir.util.HapiExtensions; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/BulkDataExportOptions.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/BulkDataExportOptions.java index c63a0df0546..4f50d6fed97 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/BulkDataExportOptions.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.api; +package ca.uhn.fhir.jpa.bulk.export.api; /*- * #%L diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/IBulkDataExportSvc.java similarity index 91% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/IBulkDataExportSvc.java index af39667b19b..bbd1d1a6628 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/api/IBulkDataExportSvc.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.api; +package ca.uhn.fhir.jpa.bulk.export.api; /*- * #%L @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.api; * #L% */ -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import org.hl7.fhir.instance.model.api.IIdType; import javax.transaction.Transactional; @@ -50,7 +50,7 @@ public interface IBulkDataExportSvc { class JobInfo { private String myJobId; - private BulkJobStatusEnum myStatus; + private BulkExportJobStatusEnum myStatus; private List myFiles; private String myRequest; private Date myStatusTime; @@ -90,11 +90,11 @@ public interface IBulkDataExportSvc { } - public BulkJobStatusEnum getStatus() { + public BulkExportJobStatusEnum getStatus() { return myStatus; } - public JobInfo setStatus(BulkJobStatusEnum theStatus) { + public JobInfo setStatus(BulkExportJobStatusEnum theStatus) { myStatus = theStatus; return this; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseBulkItemReader.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseBulkItemReader.java index 9ab5e56a75d..7f934cfb248 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseBulkItemReader.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -30,7 +30,6 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; -import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -102,7 +101,7 @@ public abstract class BaseBulkItemReader implements ItemReader getResourcePidIterator(); + protected abstract Iterator getResourcePidIterator(); protected List createSearchParameterMapsForResourceType() { BulkExportJobEntity jobEntity = getJobEntity(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportCreateEntityStepListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportCreateEntityStepListener.java similarity index 78% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportCreateEntityStepListener.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportCreateEntityStepListener.java index 96b25dc4073..8da195c344c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportCreateEntityStepListener.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportCreateEntityStepListener.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -20,16 +20,12 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; -import org.springframework.batch.core.BatchStatus; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.StepExecutionListener; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; - -import static org.apache.commons.lang3.StringUtils.isNotBlank; /** * Will run before and after a job to set the status to whatever is appropriate. @@ -43,7 +39,7 @@ public class BulkExportCreateEntityStepListener implements StepExecutionListener public void beforeStep(StepExecution theStepExecution) { String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString("jobUUID"); if (jobUuid != null) { - myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.BUILDING); + myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.BUILDING); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportGenerateResourceFilesStepListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportGenerateResourceFilesStepListener.java similarity index 88% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportGenerateResourceFilesStepListener.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportGenerateResourceFilesStepListener.java index cbd7e651762..699055e6404 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportGenerateResourceFilesStepListener.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportGenerateResourceFilesStepListener.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.StepExecutionListener; @@ -55,7 +55,7 @@ public class BulkExportGenerateResourceFilesStepListener implements StepExecutio } assert isNotBlank(jobUuid); String exitDescription = theStepExecution.getExitStatus().getExitDescription(); - myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.ERROR, exitDescription); + myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.ERROR, exitDescription); } return theStepExecution.getExitStatus(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobCloser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobCloser.java similarity index 83% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobCloser.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobCloser.java index 6e336c16b54..291251894d3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobCloser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobCloser.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * 
#%L @@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.StepContribution; import org.springframework.batch.core.scope.context.ChunkContext; @@ -44,9 +44,9 @@ public class BulkExportJobCloser implements Tasklet { @Override public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) { if (theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus() == BatchStatus.STARTED) { - myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.COMPLETE); + myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.COMPLETE); } else { - myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.ERROR); + myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.ERROR); } return RepeatStatus.FINISHED; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java similarity index 88% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java index 22a11abcbdc..6a44261b140 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.bulk.job; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import 
ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor; import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor; -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -35,8 +35,6 @@ import org.springframework.batch.core.configuration.annotation.JobScope; import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; import org.springframework.batch.core.configuration.annotation.StepScope; import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; import org.springframework.batch.item.support.CompositeItemProcessor; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; @@ -59,6 +57,7 @@ public class BulkExportJobConfig { public static final String GROUP_ID_PARAMETER = "groupId"; public static final String RESOURCE_TYPES_PARAMETER = "resourceTypes"; public static final int CHUNK_SIZE = 100; + public static final String JOB_DESCRIPTION = "jobDescription"; @Autowired private StepBuilderFactory myStepBuilderFactory; @@ -90,9 +89,9 @@ public class BulkExportJobConfig { @Lazy public Job bulkExportJob() { return myJobBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_JOB_NAME) - .validator(bulkJobParameterValidator()) + .validator(bulkExportJobParameterValidator()) .start(createBulkExportEntityStep()) - .next(partitionStep()) + .next(bulkExportPartitionStep()) .next(closeJobStep()) .build(); } @@ -114,7 +113,7 @@ public class BulkExportJobConfig { public Job groupBulkExportJob() { return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME) .validator(groupBulkJobParameterValidator()) - .validator(bulkJobParameterValidator()) + 
.validator(bulkExportJobParameterValidator()) .start(createBulkExportEntityStep()) .next(groupPartitionStep()) .next(closeJobStep()) @@ -125,7 +124,7 @@ public class BulkExportJobConfig { @Lazy public Job patientBulkExportJob() { return myJobBuilderFactory.get(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME) - .validator(bulkJobParameterValidator()) + .validator(bulkExportJobParameterValidator()) .start(createBulkExportEntityStep()) .next(patientPartitionStep()) .next(closeJobStep()) @@ -150,8 +149,9 @@ public class BulkExportJobConfig { return new CreateBulkExportEntityTasklet(); } + @Bean - public JobParametersValidator bulkJobParameterValidator() { + public JobParametersValidator bulkExportJobParameterValidator() { return new BulkExportJobParameterValidator(); } @@ -159,7 +159,7 @@ public class BulkExportJobConfig { @Bean public Step groupBulkExportGenerateResourceFilesStep() { return myStepBuilderFactory.get("groupBulkExportGenerateResourceFilesStep") - ., List> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. + ., List>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. .reader(groupBulkItemReader()) .processor(inflateResourceThenAnnotateWithGoldenResourceProcessor()) .writer(resourceToFileWriter()) @@ -170,17 +170,18 @@ public class BulkExportJobConfig { @Bean public Step bulkExportGenerateResourceFilesStep() { return myStepBuilderFactory.get("bulkExportGenerateResourceFilesStep") - ., List> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. + ., List>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. 
.reader(bulkItemReader()) .processor(myPidToIBaseResourceProcessor) .writer(resourceToFileWriter()) .listener(bulkExportGenerateResourceFilesStepListener()) .build(); } + @Bean public Step patientBulkExportGenerateResourceFilesStep() { return myStepBuilderFactory.get("patientBulkExportGenerateResourceFilesStep") - ., List> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. + ., List>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time. .reader(patientBulkItemReader()) .processor(myPidToIBaseResourceProcessor) .writer(resourceToFileWriter()) @@ -214,7 +215,7 @@ public class BulkExportJobConfig { } @Bean - public Step partitionStep() { + public Step bulkExportPartitionStep() { return myStepBuilderFactory.get("partitionStep") .partitioner("bulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner()) .step(bulkExportGenerateResourceFilesStep()) @@ -240,7 +241,7 @@ public class BulkExportJobConfig { @Bean @StepScope - public GroupBulkItemReader groupBulkItemReader(){ + public GroupBulkItemReader groupBulkItemReader() { return new GroupBulkItemReader(); } @@ -252,7 +253,7 @@ public class BulkExportJobConfig { @Bean @StepScope - public BulkItemReader bulkItemReader(){ + public BulkItemReader bulkItemReader() { return new BulkItemReader(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParameterValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParameterValidator.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java index 01e503c6687..64d06052d43 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParameterValidator.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import ca.uhn.fhir.rest.api.Constants; import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersInvalidException; import org.springframework.batch.core.JobParametersValidator; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParametersBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParametersBuilder.java similarity index 95% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParametersBuilder.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParametersBuilder.java index 7219c900c87..881e7215620 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParametersBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParametersBuilder.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; import ca.uhn.fhir.rest.api.Constants; import org.springframework.batch.core.JobParametersBuilder; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkItemReader.java similarity index 94% rename from 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkItemReader.java index 0c9bf4fae2b..83f7c7d0d50 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkItemReader.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import org.slf4j.Logger; -import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -43,7 +42,7 @@ public class BulkItemReader extends BaseBulkItemReader { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); @Override - Iterator getResourcePidIterator() { + protected Iterator getResourcePidIterator() { ourLog.info("Bulk export assembling export of type {} for job {}", myResourceType, myJobUUID); Set myReadPids = new HashSet<>(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/CreateBulkExportEntityTasklet.java similarity index 93% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/CreateBulkExportEntityTasklet.java index 74e85d2188a..3a14e3e1145 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/CreateBulkExportEntityTasklet.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -20,8 +20,8 @@ 
package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.Constants; import org.apache.commons.lang3.StringUtils; @@ -87,7 +87,7 @@ public class CreateBulkExportEntityTasklet implements Tasklet { } } - private void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) { + public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) { theChunkContext .getStepContext() .getStepExecution() diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkExportJobParametersBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkExportJobParametersBuilder.java similarity index 96% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkExportJobParametersBuilder.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkExportJobParametersBuilder.java index f79adc79ee1..5d9b90f7004 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkExportJobParametersBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkExportJobParametersBuilder.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java rename to 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java index 30a7567776e..3a10fec2aae 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -36,7 +36,6 @@ import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.param.ReferenceOrListParam; import ca.uhn.fhir.rest.param.ReferenceParam; -import com.google.common.collect.Multimaps; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; @@ -81,7 +80,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade private MdmExpansionCacheSvc myMdmExpansionCacheSvc; @Override - Iterator getResourcePidIterator() { + protected Iterator getResourcePidIterator() { Set myReadPids = new HashSet<>(); //Short circuit out if we detect we are attempting to extract patients diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupIdPresentValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupIdPresentValidator.java similarity index 93% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupIdPresentValidator.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupIdPresentValidator.java index a28eaaf0338..8e662049b5c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupIdPresentValidator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupIdPresentValidator.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * 
#%L @@ -26,7 +26,7 @@ import org.springframework.batch.core.JobParametersInvalidException; import org.springframework.batch.core.JobParametersValidator; -import static ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig.*; +import static ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig.*; import static org.slf4j.LoggerFactory.getLogger; public class GroupIdPresentValidator implements JobParametersValidator { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/PatientBulkItemReader.java similarity index 97% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/PatientBulkItemReader.java index c206404ac95..93519863ec7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/PatientBulkItemReader.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -61,7 +61,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea } @Override - Iterator getResourcePidIterator() { + protected Iterator getResourcePidIterator() { if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) { String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. 
It must be enabled for Patient Bulk Export"; ourLog.error(errorMessage); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java similarity index 97% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java index 82501c274c8..8b4ebe7e86a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -25,7 +25,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.batch.log.Logs; -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.Constants; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceTypePartitioner.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java similarity index 96% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceTypePartitioner.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java index 4cde1ab954b..7eb612d2211 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceTypePartitioner.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java @@ -1,4 +1,4 @@ 
-package ca.uhn.fhir.jpa.bulk.job; +package ca.uhn.fhir.jpa.bulk.export.job; /*- * #%L @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc; import org.slf4j.Logger; import org.springframework.batch.core.partition.support.Partitioner; import org.springframework.batch.item.ExecutionContext; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkJobStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportJobStatusEnum.java similarity index 77% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkJobStatusEnum.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportJobStatusEnum.java index e4fe675665c..db520b9cfd2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkJobStatusEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportJobStatusEnum.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.model; +package ca.uhn.fhir.jpa.bulk.export.model; /*- * #%L @@ -20,7 +20,14 @@ package ca.uhn.fhir.jpa.bulk.model; * #L% */ -public enum BulkJobStatusEnum { +import com.fasterxml.jackson.annotation.JsonFormat; + +@JsonFormat(shape = JsonFormat.Shape.STRING) +public enum BulkExportJobStatusEnum { + + /** + * Sorting OK! 
+ */ SUBMITTED, BUILDING, diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkExportResponseJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkExportResponseJson.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java index 011eb6ddf14..31f3daf32d7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/model/BulkExportResponseJson.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.model; +package ca.uhn.fhir.jpa.bulk.export.model; /*- * #%L diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java similarity index 98% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java index 28c4bc39e05..57ca30d0f78 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.provider; +package ca.uhn.fhir.jpa.bulk.export.provider; /*- * #%L @@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.bulk.provider; */ import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; -import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; +import 
ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java similarity index 95% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java index 7bec3d61a89..872c036f63a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.svc; +package ca.uhn.fhir.jpa.bulk.export.svc; /*- * #%L @@ -23,16 +23,15 @@ package ca.uhn.fhir.jpa.bulk.svc; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; -import ca.uhn.fhir.fhirpath.IFhirPath; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; -import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig; -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import 
ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; @@ -43,16 +42,12 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.jpa.model.util.JpaConstants; -import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.util.UrlUtil; -import com.google.common.collect.Sets; import org.apache.commons.lang3.time.DateUtils; -import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseBinary; -import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.InstantType; import org.quartz.JobExecutionContext; @@ -78,9 +73,9 @@ import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; -import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.GROUP; -import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.PATIENT; -import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.SYSTEM; +import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.GROUP; +import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.PATIENT; +import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.SYSTEM; import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam; import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -136,7 +131,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { Optional jobToProcessOpt = myTxTemplate.execute(t -> { Pageable page = PageRequest.of(0, 1); - Slice submittedJobs = 
myBulkExportJobDao.findByStatus(page, BulkJobStatusEnum.SUBMITTED); + Slice submittedJobs = myBulkExportJobDao.findByStatus(page, BulkExportJobStatusEnum.SUBMITTED); if (submittedJobs.isEmpty()) { return Optional.empty(); } @@ -158,7 +153,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { Optional submittedJobs = myBulkExportJobDao.findByJobId(jobUuid); if (submittedJobs.isPresent()) { BulkExportJobEntity jobEntity = submittedJobs.get(); - jobEntity.setStatus(BulkJobStatusEnum.ERROR); + jobEntity.setStatus(BulkExportJobStatusEnum.ERROR); jobEntity.setStatusMessage(e.getMessage()); myBulkExportJobDao.save(jobEntity); } @@ -344,7 +339,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { if (useCache) { Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis); Pageable page = PageRequest.of(0, 10); - Slice existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR); + Slice existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkExportJobStatusEnum.ERROR); if (!existing.isEmpty()) { return toSubmittedJobInfo(existing.iterator().next()); } @@ -373,7 +368,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { BulkExportJobEntity job = new BulkExportJobEntity(); job.setJobId(UUID.randomUUID().toString()); - job.setStatus(BulkJobStatusEnum.SUBMITTED); + job.setStatus(BulkExportJobStatusEnum.SUBMITTED); job.setSince(since); job.setCreated(new Date()); job.setRequest(request); @@ -445,7 +440,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { retVal.setStatusMessage(job.getStatusMessage()); retVal.setRequest(job.getRequest()); - if (job.getStatus() == BulkJobStatusEnum.COMPLETE) { + if (job.getStatus() == BulkExportJobStatusEnum.COMPLETE) { for (BulkExportCollectionEntity nextCollection : job.getCollections()) { for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) { retVal.addFile() diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportCollectionFileDaoSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportCollectionFileDaoSvc.java similarity index 96% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportCollectionFileDaoSvc.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportCollectionFileDaoSvc.java index 268cd4c29e6..cc255829231 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportCollectionFileDaoSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportCollectionFileDaoSvc.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.svc; +package ca.uhn.fhir.jpa.bulk.export.svc; /*- * #%L diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportDaoSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportDaoSvc.java similarity index 90% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportDaoSvc.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportDaoSvc.java index d69f8cbc235..7aa6521b68e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkExportDaoSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportDaoSvc.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.bulk.svc; +package ca.uhn.fhir.jpa.bulk.export.svc; /*- * #%L @@ -20,9 +20,7 @@ package ca.uhn.fhir.jpa.bulk.svc; * #L% */ -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao; import 
ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; @@ -84,12 +82,12 @@ public class BulkExportDaoSvc { } @Transactional - public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus) { + public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus) { setJobToStatus(theJobUUID, theStatus, null); } @Transactional - public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus, String theStatusMessage) { + public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus, String theStatusMessage) { Optional oJob = myBulkExportJobDao.findByJobId(theJobUUID); if (!oJob.isPresent()) { ourLog.error("Job with UUID {} doesn't exist!", theJobUUID); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java new file mode 100644 index 00000000000..7e6ef86b6ad --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java @@ -0,0 +1,93 @@ +package ca.uhn.fhir.jpa.bulk.imprt.api; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; + +import javax.annotation.Nonnull; +import java.util.List; + +public interface IBulkDataImportSvc { + + /** + * Create a new job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state (meaning it won't yet be worked on and can be added to) + */ + String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List theInitialFiles); + + /** + * Add more files to a job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state + * + * @param theJobId The job ID + * @param theFiles The files to add to the job + */ + void addFilesToJob(String theJobId, List theFiles); + + /** + * Move a job from {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} + * state to {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY} + * state, meaning that is is a candidate to be picked up for processing + * + * @param theJobId The job ID + */ + void markJobAsReadyForActivation(String theJobId); + + /** + * This method is intended to be called from the job scheduler, and will begin execution on + * the next job in status {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY} + * + * @return Returns {@literal true} if a job was activated + */ + boolean activateNextReadyJob(); + + /** + * Updates the job status for the given job + */ + void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus); + + /** + * Updates the job status for the given job + */ + void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage); + + /** + * Gets the number of files available for a given Job ID + * + * @param theJobId The job ID + * @return The file count + */ + BulkImportJobJson fetchJob(String theJobId); + + /** + * 
Fetch a given file by job ID + * + * @param theJobId The job ID + * @param theFileIndex The index of the file within the job + * @return The file + */ + BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex); + + /** + * Delete all input files associated with a particular job + */ + void deleteJobFiles(String theJobId); +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java new file mode 100644 index 00000000000..4e842856773 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java @@ -0,0 +1,51 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.StepExecutionListener; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Will run before and after a job to set the status to whatever is appropriate. + */ +public class ActivateBulkImportEntityStepListener implements StepExecutionListener { + + @Autowired + private IBulkDataImportSvc myBulkImportDaoSvc; + + @Override + public void beforeStep(StepExecution theStepExecution) { + String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); + if (jobUuid != null) { + myBulkImportDaoSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.RUNNING); + } + } + + @Override + public ExitStatus afterStep(StepExecution theStepExecution) { + return ExitStatus.EXECUTING; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java new file mode 100644 index 00000000000..601e76244f0 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java @@ -0,0 +1,76 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.batch.log.Logs; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord; +import ca.uhn.fhir.util.IoUtil; +import com.google.common.io.LineReader; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; + +import java.io.StringReader; + +@SuppressWarnings("UnstableApiUsage") +public class BulkImportFileReader implements ItemReader { + + @Autowired + private IBulkDataImportSvc myBulkDataImportSvc; + @Autowired + private FhirContext myFhirContext; + @Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") + private String myJobUuid; + @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}") + private int myFileIndex; + + private StringReader myReader; + private LineReader myLineReader; + private int myLineIndex; + private String myTenantName; + + @Override + public ParsedBulkImportRecord read() throws Exception { + + if (myReader == null) { + BulkImportJobFileJson file = myBulkDataImportSvc.fetchFile(myJobUuid, myFileIndex); + myTenantName = file.getTenantName(); + myReader = new StringReader(file.getContents()); + myLineReader = new LineReader(myReader); + } + + 
String nextLine = myLineReader.readLine(); + if (nextLine == null) { + IoUtil.closeQuietly(myReader); + return null; + } + + Logs.getBatchTroubleshootingLog().debug("Reading line {} file index {} for job: {}", myLineIndex++, myFileIndex, myJobUuid); + + IBaseResource parsed = myFhirContext.newJsonParser().parseResource(nextLine); + return new ParsedBulkImportRecord(myTenantName, parsed); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java new file mode 100644 index 00000000000..5f893474c26 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java @@ -0,0 +1,74 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum; +import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord; +import ca.uhn.fhir.jpa.partition.SystemRequestDetails; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; + +import java.util.List; + +public class BulkImportFileWriter implements ItemWriter { + + private static final Logger ourLog = LoggerFactory.getLogger(BulkImportFileWriter.class); + @Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") + private String myJobUuid; + @Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}") + private int myFileIndex; + @Value("#{stepExecutionContext['" + BulkImportPartitioner.ROW_PROCESSING_MODE + "']}") + private JobFileRowProcessingModeEnum myRowProcessingMode; + @Autowired + private DaoRegistry myDaoRegistry; + + @SuppressWarnings({"SwitchStatementWithTooFewBranches", "rawtypes", "unchecked"}) + @Override + public void write(List theItemLists) throws Exception { + ourLog.info("Beginning bulk import write {} chunks Job[{}] FileIndex[{}]", theItemLists.size(), myJobUuid, myFileIndex); + + for (ParsedBulkImportRecord nextItem : theItemLists) { + + SystemRequestDetails requestDetails = new SystemRequestDetails(); + requestDetails.setTenantId(nextItem.getTenantName()); + + // Yeah this is a lame switch - We'll add more later I swear + switch (myRowProcessingMode) { + default: + case FHIR_TRANSACTION: + IFhirSystemDao systemDao = myDaoRegistry.getSystemDao(); + IBaseResource inputBundle = nextItem.getRowContent(); + 
systemDao.transactionNested(requestDetails, inputBundle); + break; + } + + } + + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java new file mode 100644 index 00000000000..504874e327d --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java @@ -0,0 +1,57 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; + +/** + * Will run before and after a job to set the status to whatever is appropriate. 
+ */ +public class BulkImportJobCloser implements Tasklet { + + @Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") + private String myJobUUID; + + @Autowired + private IBulkDataImportSvc myBulkDataImportSvc; + + @Override + public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) { + BatchStatus executionStatus = theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus(); + if (executionStatus == BatchStatus.STARTED) { + myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.COMPLETE); + myBulkDataImportSvc.deleteJobFiles(myJobUUID); + } else { + myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.ERROR, "Found job in status: " + executionStatus); + myBulkDataImportSvc.deleteJobFiles(myJobUUID); + } + return RepeatStatus.FINISHED; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java new file mode 100644 index 00000000000..fd86a8ff3ab --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java @@ -0,0 +1,169 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.partition.PartitionHandler;
import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.repeat.CompletionPolicy;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.core.task.TaskExecutor;

import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_JOB_NAME;

/**
 * Spring Batch job configuration. Contains all necessary plumbing to run a
 * Bulk Import job: a parameter validator, a partitioned file-processing step
 * (one partition per staged file, fanned out over a task executor), and a
 * closing step that records the job's terminal status.
 */
@Configuration
public class BulkImportJobConfig {

	// Job parameter key for the per-chunk commit interval, consumed by
	// BulkImportProcessStepCompletionPolicy
	public static final String JOB_PARAM_COMMIT_INTERVAL = "commitInterval";

	@Autowired
	private StepBuilderFactory myStepBuilderFactory;

	@Autowired
	private JobBuilderFactory myJobBuilderFactory;

	// Shared executor used to run the per-file partitions in parallel
	@Autowired
	@Qualifier(BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR)
	private TaskExecutor myTaskExecutor;

	/**
	 * The top-level bulk import job: validate parameters, process all files
	 * via the partitioned step, then close the job.
	 */
	@Bean(name = BULK_IMPORT_JOB_NAME)
	@Lazy
	public Job bulkImportJob() throws Exception {
		return myJobBuilderFactory.get(BULK_IMPORT_JOB_NAME)
			.validator(bulkImportJobParameterValidator())
			.start(bulkImportPartitionStep())
			.next(bulkImportCloseJobStep())
			.build();
	}

	/** Rejects launches whose job UUID does not correspond to a persisted job. */
	@Bean
	public JobParametersValidator bulkImportJobParameterValidator() {
		return new BulkImportJobParameterValidator();
	}

	/** Copies the job UUID from the job parameters into the execution context. */
	@Bean
	public CreateBulkImportEntityTasklet createBulkImportEntityTasklet() {
		return new CreateBulkImportEntityTasklet();
	}

	/** Marks the persisted job entity RUNNING before the partition step starts. */
	@Bean
	@JobScope
	public ActivateBulkImportEntityStepListener activateBulkImportEntityStepListener() {
		return new ActivateBulkImportEntityStepListener();
	}

	/**
	 * Master step: partitions the job into one execution context per staged
	 * file and hands them to the partition handler for parallel execution.
	 */
	@Bean
	public Step bulkImportPartitionStep() throws Exception {
		return myStepBuilderFactory.get("bulkImportPartitionStep")
			.partitioner("bulkImportPartitionStep", bulkImportPartitioner())
			.partitionHandler(partitionHandler())
			.listener(activateBulkImportEntityStepListener())
			.gridSize(10)
			.build();
	}

	// Not a @Bean: built per-call so the worker step and executor are wired
	// together before afterPropertiesSet() validates the configuration
	private PartitionHandler partitionHandler() throws Exception {
		assert myTaskExecutor != null;

		TaskExecutorPartitionHandler retVal = new TaskExecutorPartitionHandler();
		retVal.setStep(bulkImportProcessFilesStep());
		retVal.setTaskExecutor(myTaskExecutor);
		retVal.afterPropertiesSet();
		return retVal;
	}

	/** Final step: records COMPLETE/ERROR and deletes the staged files. */
	@Bean
	public Step bulkImportCloseJobStep() {
		return myStepBuilderFactory.get("bulkImportCloseJobStep")
			.tasklet(bulkImportJobCloser())
			.build();
	}

	@Bean
	@JobScope
	public BulkImportJobCloser bulkImportJobCloser() {
		return new BulkImportJobCloser();
	}

	@Bean
	@JobScope
	public BulkImportPartitioner bulkImportPartitioner() {
		return new BulkImportPartitioner();
	}


	/**
	 * Worker step executed once per file partition: reads rows from the staged
	 * file and writes them via the file writer, chunked by the completion policy.
	 */
	@Bean
	public Step bulkImportProcessFilesStep() {
		CompletionPolicy completionPolicy = completionPolicy();

		return myStepBuilderFactory.get("bulkImportProcessFilesStep")
			.chunk(completionPolicy)
			.reader(bulkImportFileReader())
			.writer(bulkImportFileWriter())
			.listener(bulkImportStepListener())
			// Registered as a listener so the policy can receive step-scope callbacks
			.listener(completionPolicy)
			.build();
	}

	/** Chunk boundary policy driven by the commitInterval job parameter. */
	@Bean
	@StepScope
	public CompletionPolicy completionPolicy() {
		return new BulkImportProcessStepCompletionPolicy();
	}

	@Bean
	@StepScope
	public ItemWriter bulkImportFileWriter() {
		return new BulkImportFileWriter();
	}


	@Bean
	@StepScope
	public BulkImportFileReader bulkImportFileReader() {
		return new BulkImportFileReader();
	}

	/** Moves the job to ERROR status if any partition step fails. */
	@Bean
	@StepScope
	public BulkImportStepListener bulkImportStepListener() {
		return new BulkImportStepListener();
	}


}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import org.apache.commons.lang3.StringUtils; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersInvalidException; +import org.springframework.batch.core.JobParametersValidator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionTemplate; + +import java.util.Optional; + +/** + * This class will prevent a job from running if the UUID does not exist or is invalid. 
+ */ +public class BulkImportJobParameterValidator implements JobParametersValidator { + + @Autowired + private IBulkImportJobDao myBulkImportJobDao; + @Autowired + private PlatformTransactionManager myTransactionManager; + + @Override + public void validate(JobParameters theJobParameters) throws JobParametersInvalidException { + if (theJobParameters == null) { + throw new JobParametersInvalidException("This job needs Parameters: [jobUUID]"); + } + + TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); + String errorMessage = txTemplate.execute(tx -> { + StringBuilder errorBuilder = new StringBuilder(); + String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER); + Optional oJob = myBulkImportJobDao.findByJobId(jobUUID); + if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) { + errorBuilder.append("There is no persisted job that exists with UUID: "); + errorBuilder.append(jobUUID); + errorBuilder.append(". "); + } + + return errorBuilder.toString(); + }); + + if (!StringUtils.isEmpty(errorMessage)) { + throw new JobParametersInvalidException(errorMessage); + } + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java new file mode 100644 index 00000000000..626c8caa016 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java @@ -0,0 +1,72 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import org.slf4j.Logger; +import org.springframework.batch.core.partition.support.Partitioner; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; + +import javax.annotation.Nonnull; +import java.util.HashMap; +import java.util.Map; + +import static org.slf4j.LoggerFactory.getLogger; + +public class BulkImportPartitioner implements Partitioner { + public static final String FILE_INDEX = "fileIndex"; + public static final String ROW_PROCESSING_MODE = "rowProcessingMode"; + + private static final Logger ourLog = getLogger(BulkImportPartitioner.class); + + @Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}") + private String myJobUUID; + + @Autowired + private IBulkDataImportSvc myBulkDataImportSvc; + + @Nonnull + @Override + public Map partition(int gridSize) { + Map retVal = new HashMap<>(); + + BulkImportJobJson job = myBulkDataImportSvc.fetchJob(myJobUUID); + + for (int i = 0; i < job.getFileCount(); i++) { + + ExecutionContext context = new ExecutionContext(); + context.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, myJobUUID); + context.putInt(FILE_INDEX, i); + context.put(ROW_PROCESSING_MODE, job.getProcessingMode()); + + String key = "FILE" + i; + retVal.put(key, context); + } + + return retVal; + 
} + + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportProcessStepCompletionPolicy.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportProcessStepCompletionPolicy.java new file mode 100644 index 00000000000..3a3afefc636 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportProcessStepCompletionPolicy.java @@ -0,0 +1,41 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.springframework.batch.repeat.RepeatContext; +import org.springframework.batch.repeat.policy.CompletionPolicySupport; +import org.springframework.beans.factory.annotation.Value; + +import static ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL; + +public class BulkImportProcessStepCompletionPolicy extends CompletionPolicySupport { + + @Value("#{jobParameters['" + JOB_PARAM_COMMIT_INTERVAL + "']}") + private int myChunkSize; + + @Override + public boolean isComplete(RepeatContext context) { + if (context.getStartedCount() < myChunkSize) { + return false; + } + return true; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java new file mode 100644 index 00000000000..8cb1c9b2693 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java @@ -0,0 +1,63 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.StepExecutionListener; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.annotation.Nonnull; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +/** + * This class sets the job status to ERROR if any failures occur while actually + * generating the export files. + */ +public class BulkImportStepListener implements StepExecutionListener { + + @Autowired + private IBulkDataImportSvc myBulkDataImportSvc; + + @Override + public void beforeStep(@Nonnull StepExecution stepExecution) { + // nothing + } + + @Override + public ExitStatus afterStep(StepExecution theStepExecution) { + if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) { + //Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context. 
+ String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); + if (jobUuid == null) { + jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BulkExportJobConfig.JOB_UUID_PARAMETER); + } + assert isNotBlank(jobUuid); + String exitDescription = theStepExecution.getExitStatus().getExitDescription(); + myBulkDataImportSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.ERROR, exitDescription); + } + return theStepExecution.getExitStatus(); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java new file mode 100644 index 00000000000..c543ba4961f --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java @@ -0,0 +1,45 @@ +package ca.uhn.fhir.jpa.bulk.imprt.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.export.job.CreateBulkExportEntityTasklet; +import ca.uhn.fhir.util.ValidateUtil; +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.repeat.RepeatStatus; + +import java.util.Map; + +public class CreateBulkImportEntityTasklet implements Tasklet { + + @Override + public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) throws Exception { + Map jobParameters = theChunkContext.getStepContext().getJobParameters(); + + //We can leave early if they provided us with an existing job. + ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER), "Job doesn't have a UUID"); + CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER)); + return RepeatStatus.FINISHED; + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java new file mode 100644 index 00000000000..fb215a2fdce --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java @@ -0,0 +1,51 @@ +package ca.uhn.fhir.jpa.bulk.imprt.model; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.model.api.IModelJson; +import com.fasterxml.jackson.annotation.JsonProperty; + +public class BulkImportJobFileJson implements IModelJson { + + @JsonProperty("tenantName") + private String myTenantName; + @JsonProperty("contents") + private String myContents; + + public String getTenantName() { + return myTenantName; + } + + public BulkImportJobFileJson setTenantName(String theTenantName) { + myTenantName = theTenantName; + return this; + } + + public String getContents() { + return myContents; + } + + public BulkImportJobFileJson setContents(String theContents) { + myContents = theContents; + return this; + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java new file mode 100644 index 00000000000..fe6ea10d0ba --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java @@ -0,0 +1,72 @@ +package ca.uhn.fhir.jpa.bulk.imprt.model; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.model.api.IModelJson; +import com.fasterxml.jackson.annotation.JsonProperty; + +public class BulkImportJobJson implements IModelJson { + + @JsonProperty("processingMode") + private JobFileRowProcessingModeEnum myProcessingMode; + @JsonProperty("jobDescription") + private String myJobDescription; + @JsonProperty("fileCount") + private int myFileCount; + @JsonProperty("batchSize") + private int myBatchSize; + + public String getJobDescription() { + return myJobDescription; + } + + public BulkImportJobJson setJobDescription(String theJobDescription) { + myJobDescription = theJobDescription; + return this; + } + + public JobFileRowProcessingModeEnum getProcessingMode() { + return myProcessingMode; + } + + public BulkImportJobJson setProcessingMode(JobFileRowProcessingModeEnum theProcessingMode) { + myProcessingMode = theProcessingMode; + return this; + } + + public int getFileCount() { + return myFileCount; + } + + public BulkImportJobJson setFileCount(int theFileCount) { + myFileCount = theFileCount; + return this; + } + + public int getBatchSize() { + return myBatchSize; + } + + public BulkImportJobJson setBatchSize(int theBatchSize) { + myBatchSize = theBatchSize; + return this; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java new file mode 100644 index 00000000000..5c3fe355224 --- /dev/null +++ 
import com.fasterxml.jackson.annotation.JsonFormat;

/**
 * Lifecycle states of a bulk import job. Serialized by name (not ordinal)
 * so constants may be reordered safely.
 */
@JsonFormat(shape = JsonFormat.Shape.STRING)
public enum BulkImportJobStatusEnum {

	// Job has been created; files may still be added to it
	STAGING,
	// Job has been marked ready and will be picked up by the activation scheduler
	READY,
	// Job has been activated and is being processed (set during activation;
	// NOTE(review): the transition to RUNNING is not visible in this chunk - confirm)
	RUNNING,
	// Job finished successfully; staged files have been deleted
	COMPLETE,
	// Job failed; the status message carries the failure description
	ERROR

}
import com.fasterxml.jackson.annotation.JsonFormat;

/**
 * How each row of a staged bulk import file is applied to the repository.
 * Serialized by name (not ordinal).
 */
@JsonFormat(shape = JsonFormat.Shape.STRING)
public enum JobFileRowProcessingModeEnum {

	/**
	 * Sorting OK
	 */
	// (marker above means constants may be reordered without breaking persistence)

	// Each row is executed as a nested FHIR transaction against the system DAO
	FHIR_TRANSACTION

}
+ * #L% + */ + +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.io.Serializable; + +public class ParsedBulkImportRecord implements Serializable { + + private static final long serialVersionUID = 1L; + + private final String myTenantName; + private final IBaseResource myRowContent; + + public ParsedBulkImportRecord(String theTenantName, IBaseResource theRowContent) { + myTenantName = theTenantName; + myRowContent = theRowContent; + } + + public String getTenantName() { + return myTenantName; + } + + public IBaseResource getRowContent() { + return myRowContent; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java new file mode 100644 index 00000000000..2bbb97abe5d --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java @@ -0,0 +1,280 @@ +package ca.uhn.fhir.jpa.bulk.imprt.svc; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.batch.BatchJobsConfig; +import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity; +import ca.uhn.fhir.jpa.model.sched.HapiJob; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.util.ValidateUtil; +import org.apache.commons.lang3.time.DateUtils; +import org.quartz.JobExecutionContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.core.JobParametersInvalidException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionTemplate; + +import javax.annotation.Nonnull; +import javax.annotation.PostConstruct; +import javax.transaction.Transactional; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.UUID; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +public class BulkDataImportSvcImpl implements 
IBulkDataImportSvc { + private static final Logger ourLog = LoggerFactory.getLogger(BulkDataImportSvcImpl.class); + @Autowired + private IBulkImportJobDao myJobDao; + + @Autowired + private IBulkImportJobFileDao myJobFileDao; + @Autowired + private PlatformTransactionManager myTxManager; + private TransactionTemplate myTxTemplate; + @Autowired + private ISchedulerService mySchedulerService; + @Autowired + private IBatchJobSubmitter myJobSubmitter; + @Autowired + @Qualifier(BatchJobsConfig.BULK_IMPORT_JOB_NAME) + private org.springframework.batch.core.Job myBulkImportJob; + + @PostConstruct + public void start() { + myTxTemplate = new TransactionTemplate(myTxManager); + + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(ActivationJob.class.getName()); + jobDetail.setJobClass(ActivationJob.class); + mySchedulerService.scheduleClusteredJob(10 * DateUtils.MILLIS_PER_SECOND, jobDetail); + } + + @Override + @Transactional + public String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List theInitialFiles) { + ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription, "Job must not be null"); + ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription.getProcessingMode(), "Job File Processing mode must not be null"); + ValidateUtil.isTrueOrThrowInvalidRequest(theJobDescription.getBatchSize() > 0, "Job File Batch Size must be > 0"); + + String jobId = UUID.randomUUID().toString(); + + ourLog.info("Creating new Bulk Import job with {} files, assigning job ID: {}", theInitialFiles.size(), jobId); + + BulkImportJobEntity job = new BulkImportJobEntity(); + job.setJobId(jobId); + job.setFileCount(theInitialFiles.size()); + job.setStatus(BulkImportJobStatusEnum.STAGING); + job.setJobDescription(theJobDescription.getJobDescription()); + job.setBatchSize(theJobDescription.getBatchSize()); + job.setRowProcessingMode(theJobDescription.getProcessingMode()); + job = myJobDao.save(job); + + int nextSequence = 0; + 
addFilesToJob(theInitialFiles, job, nextSequence); + + return jobId; + } + + @Override + @Transactional + public void addFilesToJob(String theJobId, List theFiles) { + ourLog.info("Adding {} files to bulk import job: {}", theFiles.size(), theJobId); + + BulkImportJobEntity job = findJobByJobId(theJobId); + + ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Job %s has status %s and can not be added to", theJobId, job.getStatus()); + + addFilesToJob(theFiles, job, job.getFileCount()); + + job.setFileCount(job.getFileCount() + theFiles.size()); + myJobDao.save(job); + } + + private BulkImportJobEntity findJobByJobId(String theJobId) { + BulkImportJobEntity job = myJobDao + .findByJobId(theJobId) + .orElseThrow(() -> new InvalidRequestException("Unknown job ID: " + theJobId)); + return job; + } + + @Override + @Transactional + public void markJobAsReadyForActivation(String theJobId) { + ourLog.info("Activating bulk import job {}", theJobId); + + BulkImportJobEntity job = findJobByJobId(theJobId); + ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Bulk import job %s can not be activated in status: %s", theJobId, job.getStatus()); + + job.setStatus(BulkImportJobStatusEnum.READY); + myJobDao.save(job); + } + + /** + * To be called by the job scheduler + */ + @Transactional(value = Transactional.TxType.NEVER) + @Override + public boolean activateNextReadyJob() { + + Optional jobToProcessOpt = Objects.requireNonNull(myTxTemplate.execute(t -> { + Pageable page = PageRequest.of(0, 1); + Slice submittedJobs = myJobDao.findByStatus(page, BulkImportJobStatusEnum.READY); + if (submittedJobs.isEmpty()) { + return Optional.empty(); + } + return Optional.of(submittedJobs.getContent().get(0)); + })); + + if (!jobToProcessOpt.isPresent()) { + return false; + } + + BulkImportJobEntity bulkImportJobEntity = jobToProcessOpt.get(); + + String jobUuid = bulkImportJobEntity.getJobId(); + try { + 
processJob(bulkImportJobEntity); + } catch (Exception e) { + ourLog.error("Failure while preparing bulk export extract", e); + myTxTemplate.execute(t -> { + Optional submittedJobs = myJobDao.findByJobId(jobUuid); + if (submittedJobs.isPresent()) { + BulkImportJobEntity jobEntity = submittedJobs.get(); + jobEntity.setStatus(BulkImportJobStatusEnum.ERROR); + jobEntity.setStatusMessage(e.getMessage()); + myJobDao.save(jobEntity); + } + return false; + }); + } + + return true; + } + + @Override + @Transactional + public void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus) { + setJobToStatus(theJobId, theStatus, null); + } + + @Override + public void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage) { + BulkImportJobEntity job = findJobByJobId(theJobId); + job.setStatus(theStatus); + job.setStatusMessage(theStatusMessage); + myJobDao.save(job); + } + + @Override + @Transactional + public BulkImportJobJson fetchJob(String theJobId) { + BulkImportJobEntity job = findJobByJobId(theJobId); + return job.toJson(); + } + + @Transactional + @Override + public BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex) { + BulkImportJobEntity job = findJobByJobId(theJobId); + + return myJobFileDao + .findForJob(job, theFileIndex) + .map(t -> t.toJson()) + .orElseThrow(() -> new IllegalArgumentException("Invalid index " + theFileIndex + " for job " + theJobId)); + } + + @Override + @Transactional + public void deleteJobFiles(String theJobId) { + BulkImportJobEntity job = findJobByJobId(theJobId); + List files = myJobFileDao.findAllIdsForJob(theJobId); + for (Long next : files) { + myJobFileDao.deleteById(next); + } + myJobDao.delete(job); + } + + private void processJob(BulkImportJobEntity theBulkExportJobEntity) throws JobParametersInvalidException { + String jobId = theBulkExportJobEntity.getJobId(); + int batchSize = theBulkExportJobEntity.getBatchSize(); + ValidateUtil.isTrueOrThrowInvalidRequest(batchSize > 
0, "Batch size must be positive"); + + JobParametersBuilder parameters = new JobParametersBuilder() + .addString(BulkExportJobConfig.JOB_UUID_PARAMETER, jobId) + .addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, (long) batchSize); + + if(isNotBlank(theBulkExportJobEntity.getJobDescription())) { + parameters.addString(BulkExportJobConfig.JOB_DESCRIPTION, theBulkExportJobEntity.getJobDescription()); + } + + ourLog.info("Submitting bulk import job {} to job scheduler", jobId); + + myJobSubmitter.runJob(myBulkImportJob, parameters.toJobParameters()); + } + + private void addFilesToJob(@Nonnull List theInitialFiles, BulkImportJobEntity job, int nextSequence) { + for (BulkImportJobFileJson nextFile : theInitialFiles) { + ValidateUtil.isNotBlankOrThrowUnprocessableEntity(nextFile.getContents(), "Job File Contents mode must not be null"); + + BulkImportJobFileEntity jobFile = new BulkImportJobFileEntity(); + jobFile.setJob(job); + jobFile.setContents(nextFile.getContents()); + jobFile.setTenantName(nextFile.getTenantName()); + jobFile.setFileSequence(nextSequence++); + myJobFileDao.save(jobFile); + } + } + + + public static class ActivationJob implements HapiJob { + @Autowired + private IBulkDataImportSvc myTarget; + + @Override + public void execute(JobExecutionContext theContext) { + myTarget.activateNextReadyJob(); + } + } + + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java index a625e1d8ed3..e2a5f608117 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java @@ -11,15 +11,18 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; +import ca.uhn.fhir.jpa.batch.BatchConstants; import 
ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer; import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl; import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; -import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider; -import ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; +import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl; import ca.uhn.fhir.jpa.cache.IResourceVersionSvc; import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl; import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider; @@ -29,6 +32,7 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.LegacySearchBuilder; import ca.uhn.fhir.jpa.dao.MatchResourceUrlService; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; +import ca.uhn.fhir.jpa.dao.TransactionProcessor; import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService; import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService; import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation; @@ -63,7 +67,6 @@ import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices; import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor; import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor; -import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor; import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.packages.IHapiPackageCacheManager; @@ -95,8 +98,8 @@ 
import ca.uhn.fhir.jpa.search.builder.predicate.CoordsPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ForcedIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; @@ -129,6 +132,7 @@ import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.jpa.validation.JpaResourceLoader; import ca.uhn.fhir.jpa.validation.ValidationSettings; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor; import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices; import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor; import org.hibernate.jpa.HibernatePersistenceProvider; @@ -160,6 +164,7 @@ import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import javax.annotation.Nullable; import javax.annotation.PostConstruct; import java.util.Date; +import java.util.concurrent.RejectedExecutionHandler; /* * #%L @@ -185,7 +190,7 @@ import java.util.Date; @Configuration @EnableJpaRepositories(basePackages = "ca.uhn.fhir.jpa.dao.data") @Import({ - SearchParamConfig.class, BatchJobsConfig.class + SearchParamConfig.class, BatchJobsConfig.class }) @EnableBatchProcessing public abstract class BaseConfig { @@ -199,24 +204,23 @@ public abstract class BaseConfig { public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER = 
"PersistedJpaSearchFirstPageBundleProvider"; public static final String SEARCH_BUILDER = "SearchBuilder"; public static final String HISTORY_BUILDER = "HistoryBuilder"; - private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI"; public static final String REPOSITORY_VALIDATING_RULE_BUILDER = "repositoryValidatingRuleBuilder"; - + private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI"; @Autowired protected Environment myEnv; @Autowired private DaoRegistry myDaoRegistry; + private Integer searchCoordCorePoolSize = 20; + private Integer searchCoordMaxPoolSize = 100; + private Integer searchCoordQueueCapacity = 200; /** * Subclasses may override this method to provide settings such as search coordinator pool sizes. */ @PostConstruct - public void initSettings() {} - - private Integer searchCoordCorePoolSize = 20; - private Integer searchCoordMaxPoolSize = 100; - private Integer searchCoordQueueCapacity = 200; + public void initSettings() { + } public void setSearchCoordCorePoolSize(Integer searchCoordCorePoolSize) { this.searchCoordCorePoolSize = searchCoordCorePoolSize; @@ -297,6 +301,11 @@ public abstract class BaseConfig { return new SubscriptionTriggeringProvider(); } + @Bean + public TransactionProcessor transactionProcessor() { + return new TransactionProcessor(); + } + @Bean(name = "myAttachmentBinaryAccessProvider") @Lazy public BinaryAccessProvider binaryAccessProvider() { @@ -381,13 +390,15 @@ public abstract class BaseConfig { return retVal; } - @Bean + @Bean(name= BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR) public TaskExecutor jobLaunchingTaskExecutor() { ThreadPoolTaskExecutor asyncTaskExecutor = new ThreadPoolTaskExecutor(); - asyncTaskExecutor.setCorePoolSize(5); + asyncTaskExecutor.setCorePoolSize(0); asyncTaskExecutor.setMaxPoolSize(10); - asyncTaskExecutor.setQueueCapacity(500); + asyncTaskExecutor.setQueueCapacity(0); + asyncTaskExecutor.setAllowCoreThreadTimeOut(true); asyncTaskExecutor.setThreadNamePrefix("JobLauncher-"); + 
asyncTaskExecutor.setRejectedExecutionHandler(new ResourceReindexingSvcImpl.BlockPolicy()); asyncTaskExecutor.initialize(); return asyncTaskExecutor; } @@ -514,6 +525,11 @@ public abstract class BaseConfig { return new BulkDataExportProvider(); } + @Bean + @Lazy + public IBulkDataImportSvc bulkDataImportSvc() { + return new BulkDataImportSvcImpl(); + } @Bean public PersistedJpaBundleProviderFactory persistedJpaBundleProviderFactory() { @@ -614,7 +630,7 @@ public abstract class BaseConfig { public QuantityNormalizedPredicateBuilder newQuantityNormalizedPredicateBuilder(SearchQueryBuilder theSearchBuilder) { return new QuantityNormalizedPredicateBuilder(theSearchBuilder); } - + @Bean @Scope("prototype") public ResourceLinkPredicateBuilder newResourceLinkPredicateBuilder(QueryStack theQueryStack, SearchQueryBuilder theSearchBuilder, boolean theReversed) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java index c012ebbfc94..aebba0133b4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java @@ -7,6 +7,9 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport; +import ca.uhn.fhir.jpa.dao.TransactionProcessor; +import ca.uhn.fhir.jpa.dao.TransactionProcessorVersionAdapterDstu2; +import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4; import ca.uhn.fhir.jpa.term.TermReadSvcDstu2; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import ca.uhn.fhir.jpa.util.ResourceCountCache; @@ -93,6 +96,12 @@ public class BaseDstu2Config extends BaseConfig { return retVal; } + @Bean + public TransactionProcessor.ITransactionProcessorVersionAdapter 
transactionProcessorVersionFacade() { + return new TransactionProcessorVersionAdapterDstu2(); + } + + @Bean(name = "myDefaultProfileValidationSupport") public DefaultProfileValidationSupport defaultProfileValidationSupport() { return new DefaultProfileValidationSupport(fhirContext()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java index aa9472737c8..92529b25f5b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java @@ -87,11 +87,6 @@ public class BaseDstu3Config extends BaseConfigDstu3Plus { return new TransactionProcessorVersionAdapterDstu3(); } - @Bean - public TransactionProcessor transactionProcessor() { - return new TransactionProcessor(); - } - @Bean(name = "myResourceCountsCache") public ResourceCountCache resourceCountsCache() { ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java index beb5bc4eea0..6879d7dfd1b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java @@ -82,11 +82,6 @@ public class BaseR4Config extends BaseConfigDstu3Plus { return new TransactionProcessorVersionAdapterR4(); } - @Bean - public TransactionProcessor transactionProcessor() { - return new TransactionProcessor(); - } - @Bean(name = GRAPHQL_PROVIDER_NAME) @Lazy public GraphQLProvider graphQLProvider() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java index 6ccb22fef95..c217a864907 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java @@ -80,11 +80,6 @@ public class BaseR5Config extends BaseConfigDstu3Plus { return new TransactionProcessorVersionAdapterR5(); } - @Bean - public TransactionProcessor transactionProcessor() { - return new TransactionProcessor(); - } - @Bean(name = GRAPHQL_PROVIDER_NAME) @Lazy public GraphQLProvider graphQLProvider() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java index da680a8f017..7e1877cc77d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java @@ -3,13 +3,14 @@ package ca.uhn.fhir.jpa.dao; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; -import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.util.ResourceCountCache; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import ca.uhn.fhir.util.StopWatch; +import com.google.common.annotations.VisibleForTesting; +import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -17,6 +18,7 @@ import org.springframework.transaction.annotation.Propagation; import 
org.springframework.transaction.annotation.Transactional; import javax.annotation.Nullable; +import javax.annotation.PostConstruct; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -42,14 +44,26 @@ import java.util.Map; * #L% */ -public abstract class BaseHapiFhirSystemDao extends BaseHapiFhirDao implements IFhirSystemDao { +public abstract class BaseHapiFhirSystemDao extends BaseHapiFhirDao implements IFhirSystemDao { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class); @Autowired @Qualifier("myResourceCountsCache") public ResourceCountCache myResourceCountsCache; @Autowired - private PartitionSettings myPartitionSettings; + private TransactionProcessor myTransactionProcessor; + + @VisibleForTesting + public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) { + myTransactionProcessor = theTransactionProcessor; + } + + @Override + @PostConstruct + public void start() { + super.start(); + myTransactionProcessor.setDao(this); + } @Override @Transactional(propagation = Propagation.NEVER) @@ -91,6 +105,18 @@ public abstract class BaseHapiFhirSystemDao extends BaseHapiFhirDao BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest) { + public BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest, boolean theNestedMode) { if (theRequestDetails != null && theRequestDetails.getServer() != null && myDao != null) { IServerInterceptor.ActionRequestDetails requestDetails = new IServerInterceptor.ActionRequestDetails(theRequestDetails, theRequest, "Bundle", null); myDao.notifyInterceptors(RestOperationTypeEnum.TRANSACTION, requestDetails); } String actionName = "Transaction"; - IBaseBundle response = processTransactionAsSubRequest((RequestDetails) theRequestDetails, theRequest, actionName); + IBaseBundle response = processTransactionAsSubRequest(theRequestDetails, theRequest, actionName, theNestedMode); List entries = 
myVersionAdapter.getEntries(response); for (int i = 0; i < entries.size(); i++) { @@ -190,7 +191,7 @@ public abstract class BaseTransactionProcessor { myVersionAdapter.setRequestUrl(entry, next.getIdElement().toUnqualifiedVersionless().getValue()); } - transaction(theRequestDetails, transactionBundle); + transaction(theRequestDetails, transactionBundle, false); return resp; } @@ -270,10 +271,10 @@ public abstract class BaseTransactionProcessor { myDao = theDao; } - private IBaseBundle processTransactionAsSubRequest(RequestDetails theRequestDetails, IBaseBundle theRequest, String theActionName) { + private IBaseBundle processTransactionAsSubRequest(RequestDetails theRequestDetails, IBaseBundle theRequest, String theActionName, boolean theNestedMode) { BaseHapiFhirDao.markRequestAsProcessingSubRequest(theRequestDetails); try { - return processTransaction(theRequestDetails, theRequest, theActionName); + return processTransaction(theRequestDetails, theRequest, theActionName, theNestedMode); } finally { BaseHapiFhirDao.clearRequestAsProcessingSubRequest(theRequestDetails); } @@ -289,7 +290,7 @@ public abstract class BaseTransactionProcessor { myTxManager = theTxManager; } - private IBaseBundle batch(final RequestDetails theRequestDetails, IBaseBundle theRequest) { + private IBaseBundle batch(final RequestDetails theRequestDetails, IBaseBundle theRequest, boolean theNestedMode) { ourLog.info("Beginning batch with {} resources", myVersionAdapter.getEntries(theRequest).size()); long start = System.currentTimeMillis(); @@ -310,7 +311,7 @@ public abstract class BaseTransactionProcessor { IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode()); myVersionAdapter.addEntry(subRequestBundle, (IBase) nextRequestEntry); - IBaseBundle nextResponseBundle = processTransactionAsSubRequest((ServletRequestDetails) theRequestDetails, subRequestBundle, "Batch sub-request"); + IBaseBundle nextResponseBundle = 
processTransactionAsSubRequest(theRequestDetails, subRequestBundle, "Batch sub-request", theNestedMode); IBase subResponseEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0); myVersionAdapter.addEntry(resp, subResponseEntry); @@ -341,7 +342,7 @@ public abstract class BaseTransactionProcessor { } long delay = System.currentTimeMillis() - start; - ourLog.info("Batch completed in {}ms", new Object[]{delay}); + ourLog.info("Batch completed in {}ms", delay); return resp; } @@ -351,13 +352,13 @@ public abstract class BaseTransactionProcessor { myHapiTransactionService = theHapiTransactionService; } - private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest, final String theActionName) { + private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest, final String theActionName, boolean theNestedMode) { validateDependencies(); String transactionType = myVersionAdapter.getBundleType(theRequest); if (org.hl7.fhir.r4.model.Bundle.BundleType.BATCH.toCode().equals(transactionType)) { - return batch(theRequestDetails, theRequest); + return batch(theRequestDetails, theRequest, theNestedMode); } if (transactionType == null) { @@ -465,6 +466,10 @@ public abstract class BaseTransactionProcessor { } for (IBase nextReqEntry : getEntries) { + if (theNestedMode) { + throw new InvalidRequestException("Can not invoke read operation on nested transaction"); + } + if (!(theRequestDetails instanceof ServletRequestDetails)) { throw new MethodNotAllowedException("Can not call transaction GET methods from this context"); } @@ -976,7 +981,12 @@ public abstract class BaseTransactionProcessor { } } - IPrimitiveType deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) nextResource); + IPrimitiveType deletedInstantOrNull; + if (nextResource instanceof IAnyResource) { + deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) nextResource); + 
} else { + deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) nextResource); + } Date deletedTimestampOrNull = deletedInstantOrNull != null ? deletedInstantOrNull.getValue() : null; IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextResource.getClass()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java index 9581599cac3..81ab30c20b7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java @@ -20,559 +20,19 @@ package ca.uhn.fhir.jpa.dao; * #L% */ -import ca.uhn.fhir.context.RuntimeResourceDefinition; -import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; -import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; -import ca.uhn.fhir.jpa.api.model.DeleteConflictList; -import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome; -import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; -import ca.uhn.fhir.jpa.delete.DeleteConflictService; -import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; -import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; -import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.TagDefinition; -import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; -import ca.uhn.fhir.jpa.searchparam.MatchUrlService; -import ca.uhn.fhir.model.api.IResource; -import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; -import ca.uhn.fhir.model.base.composite.BaseResourceReferenceDt; import ca.uhn.fhir.model.dstu2.composite.MetaDt; import ca.uhn.fhir.model.dstu2.resource.Bundle; -import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; -import ca.uhn.fhir.model.dstu2.resource.Bundle.EntryResponse; -import ca.uhn.fhir.model.dstu2.resource.OperationOutcome; -import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum; 
-import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum; -import ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum; -import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.model.primitive.InstantDt; -import ca.uhn.fhir.model.primitive.UriDt; -import ca.uhn.fhir.parser.DataFormatException; -import ca.uhn.fhir.parser.IParser; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.server.RestfulServerUtils; -import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.rest.server.exceptions.NotModifiedException; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; -import ca.uhn.fhir.rest.server.method.BaseMethodBinding; -import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding; -import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; -import ca.uhn.fhir.rest.server.servlet.ServletSubRequestDetails; -import ca.uhn.fhir.util.FhirTerser; -import ca.uhn.fhir.util.UrlUtil; -import ca.uhn.fhir.util.UrlUtil.UrlParts; -import com.google.common.collect.ArrayListMultimap; -import org.apache.http.NameValuePair; import org.hl7.fhir.instance.model.api.IBaseBundle; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.transaction.support.TransactionCallback; -import 
org.springframework.transaction.support.TransactionTemplate; import javax.persistence.TypedQuery; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.IdentityHashMap; -import java.util.LinkedHashSet; import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.apache.commons.lang3.StringUtils.defaultString; -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.isNotBlank; public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoDstu2.class); - - @Autowired - private PlatformTransactionManager myTxManager; - @Autowired - private MatchUrlService myMatchUrlService; - @Autowired - private DaoRegistry myDaoRegistry; - @Autowired - private MatchResourceUrlService myMatchResourceUrlService; - @Autowired - private HapiTransactionService myHapiTransactionalService; - - private Bundle batch(final RequestDetails theRequestDetails, Bundle theRequest) { - ourLog.info("Beginning batch with {} resources", theRequest.getEntry().size()); - long start = System.currentTimeMillis(); - - Bundle resp = new Bundle(); - resp.setType(BundleTypeEnum.BATCH_RESPONSE); - - /* - * For batch, we handle each entry as a mini-transaction in its own database transaction so that if one fails, it doesn't prevent others - */ - - for (final Entry nextRequestEntry : theRequest.getEntry()) { - - TransactionCallback callback = new TransactionCallback() { - @Override - public Bundle doInTransaction(TransactionStatus theStatus) { - Bundle subRequestBundle = new Bundle(); - subRequestBundle.setType(BundleTypeEnum.TRANSACTION); - subRequestBundle.addEntry(nextRequestEntry); - return transaction((ServletRequestDetails) theRequestDetails, subRequestBundle, "Batch 
sub-request"); - } - }; - - BaseServerResponseException caughtEx; - try { - Bundle nextResponseBundle; - if (nextRequestEntry.getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.GET) { - // Don't process GETs in a transaction because they'll - // create their own - nextResponseBundle = callback.doInTransaction(null); - } else { - nextResponseBundle = myHapiTransactionalService.execute(theRequestDetails, callback); - } - caughtEx = null; - - Entry subResponseEntry = nextResponseBundle.getEntry().get(0); - resp.addEntry(subResponseEntry); - /* - * If the individual entry didn't have a resource in its response, bring the sub-transaction's OperationOutcome across so the client can see it - */ - if (subResponseEntry.getResource() == null) { - subResponseEntry.setResource(nextResponseBundle.getEntry().get(0).getResource()); - } - - } catch (BaseServerResponseException e) { - caughtEx = e; - } catch (Throwable t) { - ourLog.error("Failure during BATCH sub transaction processing", t); - caughtEx = new InternalErrorException(t); - } - - if (caughtEx != null) { - Entry nextEntry = resp.addEntry(); - - OperationOutcome oo = new OperationOutcome(); - oo.addIssue().setSeverity(IssueSeverityEnum.ERROR).setDiagnostics(caughtEx.getMessage()); - nextEntry.setResource(oo); - - EntryResponse nextEntryResp = nextEntry.getResponse(); - nextEntryResp.setStatus(toStatusString(caughtEx.getStatusCode())); - } - - } - - long delay = System.currentTimeMillis() - start; - ourLog.info("Batch completed in {}ms", new Object[] {delay}); - - return resp; - } - - @SuppressWarnings("unchecked") - private Bundle doTransaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName) { - BundleTypeEnum transactionType = theRequest.getTypeElement().getValueAsEnum(); - if (transactionType == BundleTypeEnum.BATCH) { - return batch(theRequestDetails, theRequest); - } - - return doTransaction(theRequestDetails, theRequest, theActionName, transactionType); - } - - 
private Bundle doTransaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName, BundleTypeEnum theTransactionType) { - if (theTransactionType == null) { - String message = "Transaction Bundle did not specify valid Bundle.type, assuming " + BundleTypeEnum.TRANSACTION.getCode(); - ourLog.warn(message); - theTransactionType = BundleTypeEnum.TRANSACTION; - } - if (theTransactionType != BundleTypeEnum.TRANSACTION) { - throw new InvalidRequestException("Unable to process transaction where incoming Bundle.type = " + theTransactionType.getCode()); - } - - ourLog.info("Beginning {} with {} resources", theActionName, theRequest.getEntry().size()); - - long start = System.currentTimeMillis(); - TransactionDetails transactionDetails = new TransactionDetails(); - - Set allIds = new LinkedHashSet(); - Map idSubstitutions = new HashMap(); - Map idToPersistedOutcome = new HashMap(); - - /* - * We want to execute the transaction request bundle elements in the order - * specified by the FHIR specification (see TransactionSorter) so we save the - * original order in the request, then sort it. - * - * Entries with a type of GET are removed from the bundle so that they - * can be processed at the very end. 
We do this because the incoming resources - * are saved in a two-phase way in order to deal with interdependencies, and - * we want the GET processing to use the final indexing state - */ - Bundle response = new Bundle(); - List getEntries = new ArrayList(); - IdentityHashMap originalRequestOrder = new IdentityHashMap(); - for (int i = 0; i < theRequest.getEntry().size(); i++) { - originalRequestOrder.put(theRequest.getEntry().get(i), i); - response.addEntry(); - if (theRequest.getEntry().get(i).getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.GET) { - getEntries.add(theRequest.getEntry().get(i)); - } - } - Collections.sort(theRequest.getEntry(), new TransactionSorter()); - - List deletedResources = new ArrayList<>(); - DeleteConflictList deleteConflicts = new DeleteConflictList(); - Map entriesToProcess = new IdentityHashMap<>(); - Set nonUpdatedEntities = new HashSet<>(); - Set updatedEntities = new HashSet<>(); - - /* - * Handle: GET/PUT/POST - */ - TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); - txTemplate.execute(t->{ - handleTransactionWriteOperations(theRequestDetails, theRequest, theActionName, transactionDetails, allIds, idSubstitutions, idToPersistedOutcome, response, originalRequestOrder, deletedResources, deleteConflicts, entriesToProcess, nonUpdatedEntities, updatedEntities); - return null; - }); - - /* - * Loop through the request and process any entries of type GET - */ - for (int i = 0; i < getEntries.size(); i++) { - Entry nextReqEntry = getEntries.get(i); - Integer originalOrder = originalRequestOrder.get(nextReqEntry); - Entry nextRespEntry = response.getEntry().get(originalOrder); - - ServletSubRequestDetails requestDetails = new ServletSubRequestDetails(theRequestDetails); - requestDetails.setServletRequest(theRequestDetails.getServletRequest()); - requestDetails.setRequestType(RequestTypeEnum.GET); - requestDetails.setServer(theRequestDetails.getServer()); - - String url = 
extractTransactionUrlOrThrowException(nextReqEntry, HTTPVerbEnum.GET); - - int qIndex = url.indexOf('?'); - ArrayListMultimap paramValues = ArrayListMultimap.create(); - requestDetails.setParameters(new HashMap()); - if (qIndex != -1) { - String params = url.substring(qIndex); - List parameters = UrlUtil.translateMatchUrl(params); - for (NameValuePair next : parameters) { - paramValues.put(next.getName(), next.getValue()); - } - for (Map.Entry> nextParamEntry : paramValues.asMap().entrySet()) { - String[] nextValue = nextParamEntry.getValue().toArray(new String[nextParamEntry.getValue().size()]); - requestDetails.addParameter(nextParamEntry.getKey(), nextValue); - } - url = url.substring(0, qIndex); - } - - requestDetails.setRequestPath(url); - requestDetails.setFhirServerBase(theRequestDetails.getFhirServerBase()); - - theRequestDetails.getServer().populateRequestDetailsFromRequestPath(requestDetails, url); - BaseMethodBinding method = theRequestDetails.getServer().determineResourceMethod(requestDetails, url); - if (method == null) { - throw new IllegalArgumentException("Unable to handle GET " + url); - } - - if (isNotBlank(nextReqEntry.getRequest().getIfMatch())) { - requestDetails.addHeader(Constants.HEADER_IF_MATCH, nextReqEntry.getRequest().getIfMatch()); - } - if (isNotBlank(nextReqEntry.getRequest().getIfNoneExist())) { - requestDetails.addHeader(Constants.HEADER_IF_NONE_EXIST, nextReqEntry.getRequest().getIfNoneExist()); - } - if (isNotBlank(nextReqEntry.getRequest().getIfNoneMatch())) { - requestDetails.addHeader(Constants.HEADER_IF_NONE_MATCH, nextReqEntry.getRequest().getIfNoneMatch()); - } - - if (method instanceof BaseResourceReturningMethodBinding) { - try { - IBaseResource resource = ((BaseResourceReturningMethodBinding) method).doInvokeServer(theRequestDetails.getServer(), requestDetails); - if (paramValues.containsKey(Constants.PARAM_SUMMARY) || paramValues.containsKey(Constants.PARAM_CONTENT)) { - resource = filterNestedBundle(requestDetails, 
resource); - } - nextRespEntry.setResource((IResource) resource); - nextRespEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_200_OK)); - } catch (NotModifiedException e) { - nextRespEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_304_NOT_MODIFIED)); - } - } else { - throw new IllegalArgumentException("Unable to handle GET " + url); - } - - } - - for (Map.Entry nextEntry : entriesToProcess.entrySet()) { - nextEntry.getKey().getResponse().setLocation(nextEntry.getValue().getIdDt().toUnqualified().getValue()); - nextEntry.getKey().getResponse().setEtag(nextEntry.getValue().getIdDt().getVersionIdPart()); - } - - long delay = System.currentTimeMillis() - start; - int numEntries = theRequest.getEntry().size(); - long delayPer = delay / numEntries; - ourLog.info("{} completed in {}ms ({} entries at {}ms per entry)", theActionName, delay, numEntries, delayPer); - - response.setType(BundleTypeEnum.TRANSACTION_RESPONSE); - return response; - } - - private void handleTransactionWriteOperations(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName, TransactionDetails theTransactionDetails, Set theAllIds, Map theIdSubstitutions, Map theIdToPersistedOutcome, Bundle theResponse, IdentityHashMap theOriginalRequestOrder, List theDeletedResources, DeleteConflictList theDeleteConflicts, Map theEntriesToProcess, Set theNonUpdatedEntities, Set theUpdatedEntities) { - /* - * Loop through the request and process any entries of type - * PUT, POST or DELETE - */ - for (int i = 0; i < theRequest.getEntry().size(); i++) { - - if (i % 100 == 0) { - ourLog.debug("Processed {} non-GET entries out of {}", i, theRequest.getEntry().size()); - } - - Entry nextReqEntry = theRequest.getEntry().get(i); - IResource res = nextReqEntry.getResource(); - IdDt nextResourceId = null; - if (res != null) { - - nextResourceId = res.getId(); - - if (!nextResourceId.hasIdPart()) { - if (isNotBlank(nextReqEntry.getFullUrl())) { - nextResourceId = new 
IdDt(nextReqEntry.getFullUrl()); - } - } - - if (nextResourceId.hasIdPart() && nextResourceId.getIdPart().matches("[a-zA-Z]+:.*") && !isPlaceholder(nextResourceId)) { - throw new InvalidRequestException("Invalid placeholder ID found: " + nextResourceId.getIdPart() + " - Must be of the form 'urn:uuid:[uuid]' or 'urn:oid:[oid]'"); - } - - if (nextResourceId.hasIdPart() && !nextResourceId.hasResourceType() && !isPlaceholder(nextResourceId)) { - nextResourceId = new IdDt(toResourceName(res.getClass()), nextResourceId.getIdPart()); - res.setId(nextResourceId); - } - - /* - * Ensure that the bundle doesn't have any duplicates, since this causes all kinds of weirdness - */ - if (isPlaceholder(nextResourceId)) { - if (!theAllIds.add(nextResourceId)) { - throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionContainsMultipleWithDuplicateId", nextResourceId)); - } - } else if (nextResourceId.hasResourceType() && nextResourceId.hasIdPart()) { - IdDt nextId = nextResourceId.toUnqualifiedVersionless(); - if (!theAllIds.add(nextId)) { - throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionContainsMultipleWithDuplicateId", nextId)); - } - } - - } - - HTTPVerbEnum verb = nextReqEntry.getRequest().getMethodElement().getValueAsEnum(); - if (verb == null) { - throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionEntryHasInvalidVerb", nextReqEntry.getRequest().getMethod())); - } - - String resourceType = res != null ? 
getContext().getResourceType(res) : null; - Entry nextRespEntry = theResponse.getEntry().get(theOriginalRequestOrder.get(nextReqEntry)); - - switch (verb) { - case POST: { - // CREATE - @SuppressWarnings("rawtypes") - IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(res.getClass()); - res.setId((String) null); - DaoMethodOutcome outcome; - outcome = resourceDao.create(res, nextReqEntry.getRequest().getIfNoneExist(), false, theTransactionDetails, theRequestDetails); - handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res); - theEntriesToProcess.put(nextRespEntry, outcome.getEntity()); - if (outcome.getCreated() == false) { - theNonUpdatedEntities.add(outcome.getEntity()); - } - break; - } - case DELETE: { - // DELETE - String url = extractTransactionUrlOrThrowException(nextReqEntry, verb); - UrlParts parts = UrlUtil.parseUrl(url); - IFhirResourceDao dao = toDao(parts, verb.getCode(), url); - int status = Constants.STATUS_HTTP_204_NO_CONTENT; - if (parts.getResourceId() != null) { - DaoMethodOutcome outcome = dao.delete(new IdDt(parts.getResourceType(), parts.getResourceId()), theDeleteConflicts, theRequestDetails, theTransactionDetails); - if (outcome.getEntity() != null) { - theDeletedResources.add(outcome.getId().toUnqualifiedVersionless()); - theEntriesToProcess.put(nextRespEntry, outcome.getEntity()); - } - } else { - DeleteMethodOutcome deleteOutcome = dao.deleteByUrl(parts.getResourceType() + '?' 
+ parts.getParams(), theDeleteConflicts, theRequestDetails); - List allDeleted = deleteOutcome.getDeletedEntities(); - for (ResourceTable deleted : allDeleted) { - theDeletedResources.add(deleted.getIdDt().toUnqualifiedVersionless()); - } - if (allDeleted.isEmpty()) { - status = Constants.STATUS_HTTP_404_NOT_FOUND; - } - } - - nextRespEntry.getResponse().setStatus(toStatusString(status)); - break; - } - case PUT: { - // UPDATE - @SuppressWarnings("rawtypes") - IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(res.getClass()); - - DaoMethodOutcome outcome; - - String url = extractTransactionUrlOrThrowException(nextReqEntry, verb); - - UrlParts parts = UrlUtil.parseUrl(url); - if (isNotBlank(parts.getResourceId())) { - res.setId(new IdDt(parts.getResourceType(), parts.getResourceId())); - outcome = resourceDao.update(res, null, false, theRequestDetails); - } else { - res.setId((String) null); - outcome = resourceDao.update(res, parts.getResourceType() + '?' + parts.getParams(), false, theRequestDetails); - } - - if (outcome.getCreated() == Boolean.FALSE) { - theUpdatedEntities.add(outcome.getEntity()); - } - - handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res); - theEntriesToProcess.put(nextRespEntry, outcome.getEntity()); - break; - } - case GET: - break; - } - } - - /* - * Make sure that there are no conflicts from deletions. E.g. we can't delete something - * if something else has a reference to it.. Unless the thing that has a reference to it - * was also deleted as a part of this transaction, which is why we check this now at the - * end. 
- */ - - theDeleteConflicts.removeIf(next -> theDeletedResources.contains(next.getTargetId().toVersionless())); - DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), theDeleteConflicts); - - /* - * Perform ID substitutions and then index each resource we have saved - */ - - FhirTerser terser = getContext().newTerser(); - for (DaoMethodOutcome nextOutcome : theIdToPersistedOutcome.values()) { - IResource nextResource = (IResource) nextOutcome.getResource(); - if (nextResource == null) { - continue; - } - - // References - List allRefs = terser.getAllPopulatedChildElementsOfType(nextResource, BaseResourceReferenceDt.class); - for (BaseResourceReferenceDt nextRef : allRefs) { - IdDt nextId = nextRef.getReference(); - if (!nextId.hasIdPart()) { - continue; - } - if (theIdSubstitutions.containsKey(nextId)) { - IdDt newId = theIdSubstitutions.get(nextId); - ourLog.debug(" * Replacing resource ref {} with {}", nextId, newId); - nextRef.setReference(newId); - } else { - ourLog.debug(" * Reference [{}] does not exist in bundle", nextId); - } - } - - // URIs - List allUris = terser.getAllPopulatedChildElementsOfType(nextResource, UriDt.class); - for (UriDt nextRef : allUris) { - if (nextRef instanceof IIdType) { - continue; // No substitution on the resource ID itself! - } - IdDt nextUriString = new IdDt(nextRef.getValueAsString()); - if (theIdSubstitutions.containsKey(nextUriString)) { - IdDt newId = theIdSubstitutions.get(nextUriString); - ourLog.debug(" * Replacing resource ref {} with {}", nextUriString, newId); - nextRef.setValue(newId.getValue()); - } else { - ourLog.debug(" * Reference [{}] does not exist in bundle", nextUriString); - } - } - - - InstantDt deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get(nextResource); - Date deletedTimestampOrNull = deletedInstantOrNull != null ? 
deletedInstantOrNull.getValue() : null; - if (theUpdatedEntities.contains(nextOutcome.getEntity())) { - updateInternal(theRequestDetails, nextResource, true, false, nextOutcome.getEntity(), nextResource.getIdElement(), nextOutcome.getPreviousResource(), theTransactionDetails); - } else if (!theNonUpdatedEntities.contains(nextOutcome.getEntity())) { - updateEntity(theRequestDetails, nextResource, nextOutcome.getEntity(), deletedTimestampOrNull, true, false, theTransactionDetails, false, true); - } - } - - myEntityManager.flush(); - - /* - * Double check we didn't allow any duplicates we shouldn't have - */ - for (Entry nextEntry : theRequest.getEntry()) { - if (nextEntry.getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.POST) { - String matchUrl = nextEntry.getRequest().getIfNoneExist(); - if (isNotBlank(matchUrl)) { - Class resType = nextEntry.getResource().getClass(); - Set val = myMatchResourceUrlService.processMatchUrl(matchUrl, resType, theRequestDetails); - if (val.size() > 1) { - throw new InvalidRequestException( - "Unable to process " + theActionName + " - Request would cause multiple resources to match URL: \"" + matchUrl + "\". Does transaction request contain duplicates?"); - } - } - } - } - - for (IdDt next : theAllIds) { - IdDt replacement = theIdSubstitutions.get(next); - if (replacement == null) { - continue; - } - if (replacement.equals(next)) { - continue; - } - ourLog.debug("Placeholder resource ID \"{}\" was replaced with permanent ID \"{}\"", next, replacement); - } - } - - private String extractTransactionUrlOrThrowException(Entry nextEntry, HTTPVerbEnum verb) { - String url = nextEntry.getRequest().getUrl(); - if (isBlank(url)) { - throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionMissingUrl", verb.name())); - } - return url; - } - - /** - * This method is called for nested bundles (e.g. 
if we received a transaction with an entry that - * was a GET search, this method is called on the bundle for the search result, that will be placed in the - * outer bundle). This method applies the _summary and _content parameters to the output of - * that bundle. - *

- * TODO: This isn't the most efficient way of doing this.. hopefully we can come up with something better in the future. - */ - private IBaseResource filterNestedBundle(RequestDetails theRequestDetails, IBaseResource theResource) { - IParser p = getContext().newJsonParser(); - RestfulServerUtils.configureResponseParser(theRequestDetails, p); - return p.parseResource(theResource.getClass(), p.encodeResourceToString(theResource)); - } @Override public MetaDt metaGetOperation(RequestDetails theRequestDetails) { @@ -589,31 +49,6 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao { return retVal; } - private IFhirResourceDao toDao(UrlParts theParts, String theVerb, String theUrl) { - RuntimeResourceDefinition resType; - try { - resType = getContext().getResourceDefinition(theParts.getResourceType()); - } catch (DataFormatException e) { - String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl); - throw new InvalidRequestException(msg); - } - IFhirResourceDao dao = null; - if (resType != null) { - dao = this.myDaoRegistry.getResourceDaoOrNull(resType.getImplementingClass()); - } - if (dao == null) { - String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl); - throw new InvalidRequestException(msg); - } - - // if (theParts.getResourceId() == null && theParts.getParams() == null) { - // String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl); - // throw new InvalidRequestException(msg); - // } - - return dao; - } - protected MetaDt toMetaDt(Collection tagDefinitions) { MetaDt retVal = new MetaDt(); for (TagDefinition next : tagDefinitions) { @@ -632,105 +67,9 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao { return retVal; } - @Transactional(propagation = Propagation.NEVER) - @Override - public Bundle transaction(RequestDetails 
theRequestDetails, Bundle theRequest) { - if (theRequestDetails != null) { - ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails, theRequest, "Bundle", null); - notifyInterceptors(RestOperationTypeEnum.TRANSACTION, requestDetails); - } - - String actionName = "Transaction"; - return transaction((ServletRequestDetails) theRequestDetails, theRequest, actionName); - } - - private Bundle transaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName) { - markRequestAsProcessingSubRequest(theRequestDetails); - try { - return doTransaction(theRequestDetails, theRequest, theActionName); - } finally { - clearRequestAsProcessingSubRequest(theRequestDetails); - } - } - - private static void handleTransactionCreateOrUpdateOutcome(Map idSubstitutions, Map idToPersistedOutcome, IdDt nextResourceId, DaoMethodOutcome outcome, - Entry newEntry, String theResourceType, IResource theRes) { - IdDt newId = (IdDt) outcome.getId().toUnqualifiedVersionless(); - IdDt resourceId = isPlaceholder(nextResourceId) ? nextResourceId : nextResourceId.toUnqualifiedVersionless(); - if (newId.equals(resourceId) == false) { - idSubstitutions.put(resourceId, newId); - if (isPlaceholder(resourceId)) { - /* - * The correct way for substitution IDs to be is to be with no resource type, but we'll accept the qualified kind too just to be lenient. 
- */ - idSubstitutions.put(new IdDt(theResourceType + '/' + resourceId.getValue()), newId); - } - } - idToPersistedOutcome.put(newId, outcome); - if (outcome.getCreated().booleanValue()) { - newEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_201_CREATED)); - } else { - newEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_200_OK)); - } - newEntry.getResponse().setLastModified(ResourceMetadataKeyEnum.UPDATED.get(theRes)); - } - - private static boolean isPlaceholder(IdDt theId) { - if (theId.getValue() != null) { - return theId.getValue().startsWith("urn:oid:") || theId.getValue().startsWith("urn:uuid:"); - } - return false; - } - - private static String toStatusString(int theStatusCode) { - return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode)); - } - @Override public IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) { return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented(); } - - /** - * Transaction Order, per the spec: - *

- * Process any DELETE interactions - * Process any POST interactions - * Process any PUT interactions - * Process any GET interactions - */ - public class TransactionSorter implements Comparator { - - @Override - public int compare(Entry theO1, Entry theO2) { - int o1 = toOrder(theO1); - int o2 = toOrder(theO2); - - return o1 - o2; - } - - private int toOrder(Entry theO1) { - int o1 = 0; - if (theO1.getRequest().getMethodElement().getValueAsEnum() != null) { - switch (theO1.getRequest().getMethodElement().getValueAsEnum()) { - case DELETE: - o1 = 1; - break; - case POST: - o1 = 2; - break; - case PUT: - o1 = 3; - break; - case GET: - o1 = 4; - break; - } - } - return o1; - } - - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java new file mode 100644 index 00000000000..b1b87a079e1 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java @@ -0,0 +1,171 @@ +package ca.uhn.fhir.jpa.dao; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.model.api.IResource; +import ca.uhn.fhir.model.api.TemporalPrecisionEnum; +import ca.uhn.fhir.model.dstu2.resource.Bundle; +import ca.uhn.fhir.model.dstu2.resource.OperationOutcome; +import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum; +import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum; +import ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum; +import ca.uhn.fhir.model.dstu2.valueset.IssueTypeEnum; +import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.exceptions.FHIRException; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.util.Date; +import java.util.List; + +public class TransactionProcessorVersionAdapterDstu2 implements TransactionProcessor.ITransactionProcessorVersionAdapter { + @Override + public void setResponseStatus(Bundle.Entry theBundleEntry, String theStatus) { + theBundleEntry.getResponse().setStatus(theStatus); + } + + @Override + public void setResponseLastModified(Bundle.Entry theBundleEntry, Date theLastModified) { + theBundleEntry.getResponse().setLastModified(theLastModified, TemporalPrecisionEnum.MILLI); + } + + @Override + public void setResource(Bundle.Entry theBundleEntry, IBaseResource theResource) { + theBundleEntry.setResource((IResource) theResource); + } + + @Override + public IBaseResource getResource(Bundle.Entry theBundleEntry) { + return theBundleEntry.getResource(); + } + + @Override + public String getBundleType(Bundle theRequest) { + if (theRequest.getType() == null) { + return null; + } + return theRequest.getTypeElement().getValue(); + } + + @Override + public void populateEntryWithOperationOutcome(BaseServerResponseException theCaughtEx, Bundle.Entry theEntry) { + OperationOutcome oo = new OperationOutcome(); + oo.addIssue() + 
.setSeverity(IssueSeverityEnum.ERROR) + .setDiagnostics(theCaughtEx.getMessage()) + .setCode(IssueTypeEnum.EXCEPTION); + theEntry.setResource(oo); + } + + @Override + public Bundle createBundle(String theBundleType) { + Bundle resp = new Bundle(); + try { + resp.setType(BundleTypeEnum.forCode(theBundleType)); + } catch (FHIRException theE) { + throw new InternalErrorException("Unknown bundle type: " + theBundleType); + } + return resp; + } + + @Override + public List getEntries(Bundle theRequest) { + return theRequest.getEntry(); + } + + @Override + public void addEntry(Bundle theBundle, Bundle.Entry theEntry) { + theBundle.addEntry(theEntry); + } + + @Override + public Bundle.Entry addEntry(Bundle theBundle) { + return theBundle.addEntry(); + } + + @Override + public String getEntryRequestVerb(FhirContext theContext, Bundle.Entry theEntry) { + String retVal = null; + HTTPVerbEnum value = theEntry.getRequest().getMethodElement().getValueAsEnum(); + if (value != null) { + retVal = value.getCode(); + } + return retVal; + } + + @Override + public String getFullUrl(Bundle.Entry theEntry) { + return theEntry.getFullUrl(); + } + + @Override + public String getEntryIfNoneExist(Bundle.Entry theEntry) { + return theEntry.getRequest().getIfNoneExist(); + } + + @Override + public String getEntryRequestUrl(Bundle.Entry theEntry) { + return theEntry.getRequest().getUrl(); + } + + @Override + public void setResponseLocation(Bundle.Entry theEntry, String theResponseLocation) { + theEntry.getResponse().setLocation(theResponseLocation); + } + + @Override + public void setResponseETag(Bundle.Entry theEntry, String theEtag) { + theEntry.getResponse().setEtag(theEtag); + } + + @Override + public String getEntryRequestIfMatch(Bundle.Entry theEntry) { + return theEntry.getRequest().getIfMatch(); + } + + @Override + public String getEntryRequestIfNoneExist(Bundle.Entry theEntry) { + return theEntry.getRequest().getIfNoneExist(); + } + + @Override + public String 
getEntryRequestIfNoneMatch(Bundle.Entry theEntry) { + return theEntry.getRequest().getIfNoneMatch(); + } + + @Override + public void setResponseOutcome(Bundle.Entry theEntry, IBaseOperationOutcome theOperationOutcome) { + theEntry.setResource((IResource) theOperationOutcome); + } + + @Override + public void setRequestVerb(Bundle.Entry theEntry, String theVerb) { + theEntry.getRequest().setMethod(HTTPVerbEnum.forCode(theVerb)); + } + + @Override + public void setRequestUrl(Bundle.Entry theEntry, String theUrl) { + theEntry.getRequest().setUrl(theUrl); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java index 708425d5fdb..c5bf0ddf606 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java @@ -1,6 +1,6 @@ package ca.uhn.fhir.jpa.dao.data; -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; @@ -38,13 +38,13 @@ public interface IBulkExportJobDao extends JpaRepository findByJobId(@Param("jobid") String theUuid); @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myStatus = :status") - Slice findByStatus(Pageable thePage, @Param("status") BulkJobStatusEnum theSubmitted); + Slice findByStatus(Pageable thePage, @Param("status") BulkExportJobStatusEnum theSubmitted); @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff") Slice findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff); @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status ORDER BY j.myCreated DESC") - Slice 
findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkJobStatusEnum theNotStatus); + Slice findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkExportJobStatusEnum theNotStatus); @Modifying @Query("DELETE FROM BulkExportJobEntity t") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java new file mode 100644 index 00000000000..dccaa953eb8 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java @@ -0,0 +1,40 @@ +package ca.uhn.fhir.jpa.dao.data; + +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import java.util.Optional; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public interface IBulkImportJobDao extends JpaRepository { + + @Query("SELECT j FROM BulkImportJobEntity j WHERE j.myJobId = :jobid") + Optional findByJobId(@Param("jobid") String theUuid); + + @Query("SELECT j FROM BulkImportJobEntity j WHERE j.myStatus = :status") + Slice findByStatus(Pageable thePage, @Param("status") BulkImportJobStatusEnum theStatus); +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java new file mode 100644 index 00000000000..c53e49f95a4 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java @@ -0,0 +1,43 @@ +package ca.uhn.fhir.jpa.dao.data; + +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import java.util.List; +import java.util.Optional; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public interface IBulkImportJobFileDao extends JpaRepository { + + @Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC") + List findAllForJob(@Param("jobId") String theJobId); + + @Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob = :job AND f.myFileSequence = :fileIndex") + Optional findForJob(@Param("job") BulkImportJobEntity theJob, @Param("fileIndex") int theFileIndex); + + @Query("SELECT f.myId FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC") + List findAllIdsForJob(@Param("jobId") String theJobId); + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java index 0af11e3a082..96019ae6a21 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirSystemDaoDstu3.java @@ -22,18 +22,13 @@ package ca.uhn.fhir.jpa.dao.dstu3; import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao; import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2; -import ca.uhn.fhir.jpa.dao.TransactionProcessor; import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import org.hl7.fhir.dstu3.model.Bundle; -import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent; import org.hl7.fhir.dstu3.model.Meta; import org.hl7.fhir.instance.model.api.IBaseBundle; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; import javax.annotation.PostConstruct; import javax.persistence.TypedQuery; @@ -42,14 +37,10 
@@ import java.util.List; public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao { - @Autowired - private TransactionProcessor myTransactionProcessor; - @Override @PostConstruct public void start() { super.start(); - myTransactionProcessor.setDao(this); } @Override @@ -88,12 +79,5 @@ public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao { return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented(); } - @Transactional(propagation = Propagation.NEVER) - @Override - public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) { - return myTransactionProcessor.transaction(theRequestDetails, theRequest); - } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java index 5da2372c9b6..e012ee235ad 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java @@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao.expunge; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity; import ca.uhn.fhir.jpa.entity.PartitionEntity; import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.SearchInclude; @@ -123,6 +125,8 @@ public class ExpungeEverythingService { counter.addAndGet(expungeEverythingByType(NpmPackageVersionEntity.class)); counter.addAndGet(expungeEverythingByType(NpmPackageEntity.class)); counter.addAndGet(expungeEverythingByType(SearchParamPresent.class)); + counter.addAndGet(expungeEverythingByType(BulkImportJobFileEntity.class)); + counter.addAndGet(expungeEverythingByType(BulkImportJobEntity.class)); 
counter.addAndGet(expungeEverythingByType(ForcedId.class)); counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamDate.class)); counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamNumber.class)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java index 04baaca4922..a369f3d7e5f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java @@ -22,42 +22,20 @@ package ca.uhn.fhir.jpa.dao.r4; import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao; import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2; -import ca.uhn.fhir.jpa.dao.TransactionProcessor; import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; -import com.google.common.annotations.VisibleForTesting; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.Meta; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; -import javax.annotation.PostConstruct; import javax.persistence.TypedQuery; import java.util.Collection; import java.util.List; public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao { - @Autowired - private TransactionProcessor myTransactionProcessor; - - @VisibleForTesting - public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) { - myTransactionProcessor = theTransactionProcessor; - } - - @Override - @PostConstruct - public void start() { - super.start(); - myTransactionProcessor.setDao(this); - } - 
- @Override public Meta metaGetOperation(RequestDetails theRequestDetails) { // Notify interceptors @@ -95,10 +73,4 @@ public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao { return retVal; } - @Transactional(propagation = Propagation.NEVER) - @Override - public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) { - return myTransactionProcessor.transaction(theRequestDetails, theRequest); - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java index 9d13bae6d1e..919d831e4a6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java @@ -22,20 +22,14 @@ package ca.uhn.fhir.jpa.dao.r5; import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao; import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2; -import ca.uhn.fhir.jpa.dao.TransactionProcessor; import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.r5.model.Bundle; -import org.hl7.fhir.r5.model.Bundle.BundleEntryComponent; import org.hl7.fhir.r5.model.Meta; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; -import javax.annotation.PostConstruct; import javax.persistence.TypedQuery; import java.util.Collection; import java.util.List; @@ -44,17 +38,6 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoR5.class); - @Autowired - private 
TransactionProcessor myTransactionProcessor; - - @Override - @PostConstruct - public void start() { - super.start(); - myTransactionProcessor.setDao(this); - } - - @Override public Meta metaGetOperation(RequestDetails theRequestDetails) { // Notify interceptors @@ -92,10 +75,5 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao { return retVal; } - @Transactional(propagation = Propagation.NEVER) - @Override - public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) { - return myTransactionProcessor.transaction(theRequestDetails, theRequest); - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java index 05f68783fa7..f2f8a092715 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.entity; * #L% */ -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.r5.model.InstantType; @@ -51,9 +51,9 @@ import static org.apache.commons.lang3.StringUtils.left; @Entity @Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = { - @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID") + @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID") }, indexes = { - @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME") + @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME") }) public class BulkExportJobEntity implements Serializable { @@ -70,7 +70,7 @@ public class BulkExportJobEntity implements Serializable { @Enumerated(EnumType.STRING) @Column(name = "JOB_STATUS", length = 10, nullable = false) - private 
BulkJobStatusEnum myStatus; + private BulkExportJobStatusEnum myStatus; @Temporal(TemporalType.TIMESTAMP) @Column(name = "CREATED_TIME", nullable = false) private Date myCreated; @@ -156,11 +156,11 @@ public class BulkExportJobEntity implements Serializable { return b.toString(); } - public BulkJobStatusEnum getStatus() { + public BulkExportJobStatusEnum getStatus() { return myStatus; } - public void setStatus(BulkJobStatusEnum theStatus) { + public void setStatus(BulkExportJobStatusEnum theStatus) { if (myStatus != theStatus) { myStatusTime = new Date(); myStatus = theStatus; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java new file mode 100644 index 00000000000..b7de7e9cc7b --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java @@ -0,0 +1,157 @@ +package ca.uhn.fhir.jpa.entity; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.EnumType; +import javax.persistence.Enumerated; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import javax.persistence.UniqueConstraint; +import javax.persistence.Version; +import java.io.Serializable; +import java.util.Date; + +import static org.apache.commons.lang3.StringUtils.left; + +@Entity +@Table(name = "HFJ_BLK_IMPORT_JOB", uniqueConstraints = { + @UniqueConstraint(name = "IDX_BLKIM_JOB_ID", columnNames = "JOB_ID") +}) +public class BulkImportJobEntity implements Serializable { + + @Id + @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOB_PID") + @SequenceGenerator(name = "SEQ_BLKIMJOB_PID", sequenceName = "SEQ_BLKIMJOB_PID") + @Column(name = "PID") + private Long myId; + + @Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false, updatable = false) + private String myJobId; + @Column(name = "JOB_DESC", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN) + private String myJobDescription; + @Enumerated(EnumType.STRING) + @Column(name = "JOB_STATUS", length = 10, nullable = false) + private BulkImportJobStatusEnum myStatus; + @Version + @Column(name = "OPTLOCK", nullable = false) + private int myVersion; + @Column(name = "FILE_COUNT", nullable = false) + private int myFileCount; + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "STATUS_TIME", nullable = false) + private Date myStatusTime; + @Column(name = "STATUS_MESSAGE", nullable = true, length = 
BulkExportJobEntity.STATUS_MESSAGE_LEN) + private String myStatusMessage; + @Column(name = "ROW_PROCESSING_MODE", length = 20, nullable = false, updatable = false) + @Enumerated(EnumType.STRING) + private JobFileRowProcessingModeEnum myRowProcessingMode; + @Column(name = "BATCH_SIZE", nullable = false, updatable = false) + private int myBatchSize; + + public String getJobDescription() { + return myJobDescription; + } + + public void setJobDescription(String theJobDescription) { + myJobDescription = left(theJobDescription, BulkExportJobEntity.STATUS_MESSAGE_LEN); + } + + public JobFileRowProcessingModeEnum getRowProcessingMode() { + return myRowProcessingMode; + } + + public void setRowProcessingMode(JobFileRowProcessingModeEnum theRowProcessingMode) { + myRowProcessingMode = theRowProcessingMode; + } + + public Date getStatusTime() { + return myStatusTime; + } + + public void setStatusTime(Date theStatusTime) { + myStatusTime = theStatusTime; + } + + public int getFileCount() { + return myFileCount; + } + + public void setFileCount(int theFileCount) { + myFileCount = theFileCount; + } + + public String getJobId() { + return myJobId; + } + + public void setJobId(String theJobId) { + myJobId = theJobId; + } + + public BulkImportJobStatusEnum getStatus() { + return myStatus; + } + + /** + * Sets the status, updates the status time, and clears the status message + */ + public void setStatus(BulkImportJobStatusEnum theStatus) { + if (myStatus != theStatus) { + myStatus = theStatus; + setStatusTime(new Date()); + setStatusMessage(null); + } + } + + public String getStatusMessage() { + return myStatusMessage; + } + + public void setStatusMessage(String theStatusMessage) { + myStatusMessage = left(theStatusMessage, BulkExportJobEntity.STATUS_MESSAGE_LEN); + } + + public BulkImportJobJson toJson() { + return new BulkImportJobJson() + .setProcessingMode(getRowProcessingMode()) + .setFileCount(getFileCount()) + .setJobDescription(getJobDescription()); + } + + public int 
getBatchSize() { + return myBatchSize; + } + + public void setBatchSize(int theBatchSize) { + myBatchSize = theBatchSize; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java new file mode 100644 index 00000000000..b1dd778a2c8 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java @@ -0,0 +1,104 @@ +package ca.uhn.fhir.jpa.entity; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.ForeignKey; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Index; +import javax.persistence.JoinColumn; +import javax.persistence.Lob; +import javax.persistence.ManyToOne; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; + +@Entity +@Table(name = "HFJ_BLK_IMPORT_JOBFILE", indexes = { + @Index(name = "IDX_BLKIM_JOBFILE_JOBID", columnList = "JOB_PID") +}) +public class BulkImportJobFileEntity implements Serializable { + + @Id + @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOBFILE_PID") + @SequenceGenerator(name = "SEQ_BLKIMJOBFILE_PID", sequenceName = "SEQ_BLKIMJOBFILE_PID") + @Column(name = "PID") + private Long myId; + + @ManyToOne + @JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_BLKIMJOBFILE_JOB")) + private BulkImportJobEntity myJob; + + @Column(name = "FILE_SEQ", nullable = false) + private int myFileSequence; + + @Lob + @Column(name = "JOB_CONTENTS", nullable = false) + private byte[] myContents; + + @Column(name = "TENANT_NAME", nullable = true, length = PartitionEntity.MAX_NAME_LENGTH) + private String myTenantName; + + public BulkImportJobEntity getJob() { + return myJob; + } + + public void setJob(BulkImportJobEntity theJob) { + myJob = theJob; + } + + public int getFileSequence() { + return myFileSequence; + } + + public void setFileSequence(int theFileSequence) { + myFileSequence = theFileSequence; + } + + public String getContents() { + return new String(myContents, StandardCharsets.UTF_8); + } + + public void setContents(String theContents) { + myContents = 
theContents.getBytes(StandardCharsets.UTF_8); + } + + + public BulkImportJobFileJson toJson() { + return new BulkImportJobFileJson() + .setContents(getContents()) + .setTenantName(getTenantName()); + } + + public void setTenantName(String theTenantName) { + myTenantName = theTenantName; + } + + public String getTenantName() { + return myTenantName; + } +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java new file mode 100644 index 00000000000..f3e6a6a130d --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BaseBatchJobR4Test.java @@ -0,0 +1,58 @@ +package ca.uhn.fhir.jpa.bulk; + +import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.awaitility.Awaitility.await; +import static org.junit.jupiter.api.Assertions.fail; + +public class BaseBatchJobR4Test extends BaseJpaR4Test { + + private static final Logger ourLog = LoggerFactory.getLogger(BaseBatchJobR4Test.class); + @Autowired + private JobExplorer myJobExplorer; + + protected List awaitAllBulkJobCompletions(String... 
theJobNames) { + assert theJobNames.length > 0; + + List<JobInstance> bulkExport = new ArrayList<>(); + for (String nextName : theJobNames) { + bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(nextName, 0, 100)); + } + if (bulkExport.isEmpty()) { + List<String> wantNames = Arrays.asList(theJobNames); + List<String> haveNames = myJobExplorer.getJobNames(); + fail("There are no jobs running - Want names " + wantNames + " and have names " + haveNames); + } + List<JobExecution> bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList()); + awaitJobCompletions(bulkExportExecutions); + + return bulkExportExecutions; + } + + protected void awaitJobCompletions(Collection<JobExecution> theJobs) { + theJobs.forEach(jobExecution -> awaitJobCompletion(jobExecution)); + } + + protected void awaitJobCompletion(JobExecution theJobExecution) { + await().atMost(120, TimeUnit.SECONDS).until(() -> { + JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId()); + ourLog.info("JobExecution {} currently has status: {}- Failures if any: {}", theJobExecution.getId(), jobExecution.getStatus(), jobExecution.getFailureExceptions()); + return jobExecution.getStatus() == BatchStatus.COMPLETED || jobExecution.getStatus() == BatchStatus.FAILED; + }); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index ede41a213e8..2c216b9074d 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -2,11 +2,11 @@ package ca.uhn.fhir.jpa.bulk; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; -import 
ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson; -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; -import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.client.apache.ResourceEntity; @@ -188,7 +188,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID) - .setStatus(BulkJobStatusEnum.BUILDING) + .setStatus(BulkExportJobStatusEnum.BUILDING) .setStatusTime(InstantType.now().getValue()); when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo); @@ -212,7 +212,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID) - .setStatus(BulkJobStatusEnum.ERROR) + .setStatus(BulkExportJobStatusEnum.ERROR) .setStatusTime(InstantType.now().getValue()) .setStatusMessage("Some Error Message"); when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo); @@ -239,7 +239,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID) - .setStatus(BulkJobStatusEnum.COMPLETE) + .setStatus(BulkExportJobStatusEnum.COMPLETE) .setStatusTime(InstantType.now().getValue()); jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/111")); jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/222")); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 58fd8a39f5b..fdc92d090e9 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -6,15 +6,14 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; -import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; -import ca.uhn.fhir.jpa.bulk.job.BulkExportJobParametersBuilder; -import ca.uhn.fhir.jpa.bulk.job.GroupBulkExportJobParametersBuilder; -import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobParametersBuilder; +import ca.uhn.fhir.jpa.bulk.export.job.GroupBulkExportJobParametersBuilder; +import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; -import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; @@ -46,28 +45,22 @@ import org.hl7.fhir.r4.model.Reference; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; import org.springframework.batch.core.JobParametersBuilder; import 
org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.explore.JobExplorer; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import java.util.Arrays; -import java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.awaitility.Awaitility.await; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; @@ -78,7 +71,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; -public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { +public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test { public static final String TEST_FILTER = "Patient?gender=female"; private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImplR4Test.class); @@ -92,8 +85,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { private IBulkDataExportSvc myBulkDataExportSvc; @Autowired private IBatchJobSubmitter myBatchJobSubmitter; - @Autowired - private JobExplorer myJobExplorer; @Autowired @Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME) @@ -128,7 +119,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { String binaryId = myBinaryDao.create(b).getId().toUnqualifiedVersionless().getValue(); BulkExportJobEntity job = new BulkExportJobEntity(); - job.setStatus(BulkJobStatusEnum.COMPLETE); + job.setStatus(BulkExportJobStatusEnum.COMPLETE); job.setExpiry(DateUtils.addHours(new Date(), -1)); job.setJobId(UUID.randomUUID().toString()); job.setCreated(new Date()); @@ -241,6 +232,7 @@ public class 
BulkDataExportSvcImplR4Test extends BaseJpaR4Test { options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); return options; } + @Test public void testSubmit_ReusesExisting() { @@ -278,7 +270,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Check the status IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus()); // Run a scheduled pass to build the export myBulkDataExportSvc.buildExportFiles(); @@ -287,7 +279,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Fetch the job again status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.ERROR, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.ERROR, status.getStatus()); assertThat(status.getStatusMessage(), containsString("help i'm a bug")); } finally { @@ -295,6 +287,14 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } } + private void awaitAllBulkJobCompletions() { + awaitAllBulkJobCompletions( + BatchJobsConfig.BULK_EXPORT_JOB_NAME, + BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME, + BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME + ); + } + @Test public void testGenerateBulkExport_SpecificResources() { @@ -313,7 +313,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Check the status IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_typeFilter=" + UrlUtil.escapeUrlParam(TEST_FILTER), status.getRequest()); // Run a scheduled pass to build the export @@ -323,7 +323,7 @@ public 
class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Fetch the job again status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus()); // Iterate over the files for (IBulkDataExportSvc.FileEntry next : status.getFiles()) { @@ -368,7 +368,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Check the status IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson", status.getRequest()); // Run a scheduled pass to build the export @@ -378,7 +378,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Fetch the job again status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus()); assertEquals(5, status.getFiles().size()); // Iterate over the files @@ -393,7 +393,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } else if ("Observation".equals(next.getResourceType())) { assertThat(nextContents, containsString("\"subject\":{\"reference\":\"Patient/PAT0\"}}\n")); assertEquals(26, nextContents.split("\n").length); - }else if ("Immunization".equals(next.getResourceType())) { + } else if ("Immunization".equals(next.getResourceType())) { assertThat(nextContents, containsString("\"patient\":{\"reference\":\"Patient/PAT0\"}}\n")); assertEquals(26, nextContents.split("\n").length); } else if ("CareTeam".equals(next.getResourceType())) { @@ -428,7 +428,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo 
jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(5)); } @@ -451,7 +451,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Check the status IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Patient&_typeFilter=Patient%3F_has%3AObservation%3Apatient%3Aidentifier%3DSYS%7CVAL3", status.getRequest()); // Run a scheduled pass to build the export @@ -461,7 +461,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Fetch the job again status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus()); assertEquals(1, status.getFiles().size()); // Iterate over the files @@ -481,7 +481,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } @Test - public void testGenerateBulkExport_WithSince() throws InterruptedException { + public void testGenerateBulkExport_WithSince() { // Create some resources to load createResources(); @@ -508,7 +508,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Check the status IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_since=" + 
cutoff.setTimeZoneZulu(true).getValueAsString(), status.getRequest()); // Run a scheduled pass to build the export @@ -518,7 +518,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Fetch the job again status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); + assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus()); assertEquals(1, status.getFiles().size()); // Iterate over the files @@ -560,24 +560,10 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { String jobUUID = (String) jobExecution.getExecutionContext().get("jobUUID"); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobUUID); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); } - public void awaitAllBulkJobCompletions() { - List bulkExport = myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.BULK_EXPORT_JOB_NAME, 0, 100); - bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME, 0, 100)); - bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME, 0, 100)); - if (bulkExport.isEmpty()) { - fail("There are no bulk export jobs running!"); - } - List bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList()); - awaitJobCompletions(bulkExportExecutions); - } - - public void awaitJobCompletions(Collection theJobs) { - theJobs.forEach(jobExecution -> awaitJobCompletion(jobExecution)); - } @Test public void testBatchJobSubmitsAndRuns() throws Exception { @@ -599,13 +585,13 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { awaitJobCompletion(jobExecution); IBulkDataExportSvc.JobInfo 
jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); } @Test - public void testGroupBatchJobWorks() throws Exception { + public void testGroupBatchJobWorks() { createResources(); // Create a bulk job @@ -625,7 +611,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); @@ -639,8 +625,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(nextContents, is(containsString("IMM6"))); assertThat(nextContents, is(containsString("IMM8"))); } + @Test - public void testGroupBatchJobMdmExpansionIdentifiesGoldenResources() throws Exception { + public void testGroupBatchJobMdmExpansionIdentifiesGoldenResources() { createResources(); // Create a bulk job @@ -659,7 +646,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); @@ -716,7 +703,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { awaitJobCompletion(jobExecution); IBulkDataExportSvc.JobInfo jobInfo = 
myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); @@ -747,7 +734,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // CareTeam has two patient references: participant and patient. This test checks if we find the patient if participant is null but patient is not null @Test - public void testGroupBatchJobCareTeam() throws Exception { + public void testGroupBatchJobCareTeam() { createResources(); BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); @@ -766,7 +753,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("CareTeam"))); @@ -810,7 +797,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); @@ -847,7 +834,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - 
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Observation"))); String nextContents = getBinaryContents(jobInfo, 0); @@ -888,7 +875,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { awaitAllBulkJobCompletions(); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Patient"))); @@ -900,7 +887,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } @Test - public void testMdmExpansionWorksForGroupExportOnMatchedPatients() throws JobParametersInvalidException { + public void testMdmExpansionWorksForGroupExportOnMatchedPatients() { createResources(); // Create a bulk job @@ -918,9 +905,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { awaitAllBulkJobCompletions(); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals("/Group/G0/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId +"&_mdm=true", jobInfo.getRequest()); + assertEquals("/Group/G0/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId + "&_mdm=true", jobInfo.getRequest()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().get(0).getResourceType(), 
is(equalTo("Immunization"))); @@ -963,7 +950,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } @Test - public void testGroupBulkExportSupportsTypeFilters() throws JobParametersInvalidException { + public void testGroupBulkExportSupportsTypeFilters() { createResources(); //Only get COVID-19 vaccinations @@ -985,7 +972,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); @@ -1021,7 +1008,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { myBulkDataExportSvc.buildExportFiles(); awaitAllBulkJobCompletions(); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); + assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE))); //Group-style bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); @@ -1030,7 +1017,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { myBulkDataExportSvc.buildExportFiles(); awaitAllBulkJobCompletions(); jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); + assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE))); //System-style bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); @@ -1038,7 +1025,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { myBulkDataExportSvc.buildExportFiles(); awaitAllBulkJobCompletions(); jobInfo 
= myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); + assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE))); } @Test @@ -1077,14 +1064,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } - private void awaitJobCompletion(JobExecution theJobExecution) { - await().atMost(120, TimeUnit.SECONDS).until(() -> { - JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId()); - ourLog.info("JobExecution {} currently has status: {}", theJobExecution.getId(), jobExecution.getStatus()); - return jobExecution.getStatus() == BatchStatus.COMPLETED || jobExecution.getStatus() == BatchStatus.FAILED; - }); - } - private void createResources() { Group group = new Group(); group.setId("G0"); @@ -1109,7 +1088,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { linkToGoldenResource(goldenPid, sourcePid); //Only add half the patients to the group. - if (i % 2 == 0 ) { + if (i % 2 == 0) { group.addMember().setEntity(new Reference(patId)); } @@ -1119,7 +1098,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createCareTeamWithIndex(i, patId); } - myPatientGroupId = myGroupDao.update(group).getId(); + myPatientGroupId = myGroupDao.update(group).getId(); //Manually create another golden record Patient goldenPatient2 = new Patient(); @@ -1153,8 +1132,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { patient.setGender(i % 2 == 0 ? 
Enumerations.AdministrativeGender.MALE : Enumerations.AdministrativeGender.FEMALE); patient.addName().setFamily("FAM" + i); patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i); - DaoMethodOutcome patientOutcome = myPatientDao.update(patient); - return patientOutcome; + return myPatientDao.update(patient); } private void createCareTeamWithIndex(int i, IIdType patId) { @@ -1167,7 +1145,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { private void createImmunizationWithIndex(int i, IIdType patId) { Immunization immunization = new Immunization(); immunization.setId("IMM" + i); - if (patId != null ) { + if (patId != null) { immunization.setPatient(new Reference(patId)); } if (i % 2 == 0) { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java new file mode 100644 index 00000000000..dcee246154c --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java @@ -0,0 +1,155 @@ +package ca.uhn.fhir.jpa.bulk.imprt.svc; + +import ca.uhn.fhir.interceptor.api.HookParams; +import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.batch.BatchJobsConfig; +import ca.uhn.fhir.jpa.bulk.BaseBatchJobR4Test; +import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import 
ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.test.utilities.ITestDataBuilder; +import ca.uhn.fhir.util.BundleBuilder; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; +import org.springframework.batch.core.JobExecution; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.annotation.Nonnull; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class BulkDataImportR4Test extends BaseBatchJobR4Test implements ITestDataBuilder { + + @Autowired + private IBulkDataImportSvc mySvc; + @Autowired + private IBulkImportJobDao myBulkImportJobDao; + @Autowired + private IBulkImportJobFileDao myBulkImportJobFileDao; + + @AfterEach + public void after() { + myInterceptorRegistry.unregisterInterceptorsIf(t -> t instanceof IAnonymousInterceptor); + } + + @Test + public void testFlow_TransactionRows() { + int transactionsPerFile = 10; + int fileCount = 10; + List files = createInputFiles(transactionsPerFile, fileCount); + + BulkImportJobJson job = new BulkImportJobJson(); + job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + job.setJobDescription("This is the description"); + job.setBatchSize(3); + String jobId = mySvc.createNewJob(job, files); + mySvc.markJobAsReadyForActivation(jobId); + 
+ boolean activateJobOutcome = mySvc.activateNextReadyJob(); + assertTrue(activateJobOutcome); + + List executions = awaitAllBulkJobCompletions(); + assertEquals(1, executions.size()); + assertEquals("This is the description", executions.get(0).getJobParameters().getString(BulkExportJobConfig.JOB_DESCRIPTION)); + + runInTransaction(() -> { + List jobs = myBulkImportJobDao.findAll(); + assertEquals(0, jobs.size()); + + List jobFiles = myBulkImportJobFileDao.findAll(); + assertEquals(0, jobFiles.size()); + + }); + + IBundleProvider searchResults = myPatientDao.search(SearchParameterMap.newSynchronous()); + assertEquals(transactionsPerFile * fileCount, searchResults.sizeOrThrowNpe()); + + } + + @Test + public void testFlow_WithTenantNamesInInput() { + int transactionsPerFile = 5; + int fileCount = 10; + List files = createInputFiles(transactionsPerFile, fileCount); + for (int i = 0; i < fileCount; i++) { + files.get(i).setTenantName("TENANT" + i); + } + + IAnonymousInterceptor interceptor = mock(IAnonymousInterceptor.class); + myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, interceptor); + + BulkImportJobJson job = new BulkImportJobJson(); + job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + job.setBatchSize(5); + String jobId = mySvc.createNewJob(job, files); + mySvc.markJobAsReadyForActivation(jobId); + + boolean activateJobOutcome = mySvc.activateNextReadyJob(); + assertTrue(activateJobOutcome); + + awaitAllBulkJobCompletions(); + + ArgumentCaptor paramsCaptor = ArgumentCaptor.forClass(HookParams.class); + verify(interceptor, times(50)).invoke(any(), paramsCaptor.capture()); + List tenantNames = paramsCaptor + .getAllValues() + .stream() + .map(t -> t.get(RequestDetails.class).getTenantId()) + .distinct() + .sorted() + .collect(Collectors.toList()); + assertThat(tenantNames, containsInAnyOrder( + "TENANT0", "TENANT1", "TENANT2", "TENANT3", "TENANT4", "TENANT5", "TENANT6", "TENANT7", "TENANT8", 
"TENANT9" + )); + } + + + @Nonnull + private List createInputFiles(int transactionsPerFile, int fileCount) { + List files = new ArrayList<>(); + for (int fileIndex = 0; fileIndex < fileCount; fileIndex++) { + StringBuilder fileContents = new StringBuilder(); + + for (int transactionIdx = 0; transactionIdx < transactionsPerFile; transactionIdx++) { + BundleBuilder bundleBuilder = new BundleBuilder(myFhirCtx); + IBaseResource patient = buildPatient(withFamily("FAM " + fileIndex + " " + transactionIdx)); + bundleBuilder.addTransactionCreateEntry(patient); + fileContents.append(myFhirCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(bundleBuilder.getBundle())); + fileContents.append("\n"); + } + + BulkImportJobFileJson nextFile = new BulkImportJobFileJson(); + nextFile.setContents(fileContents.toString()); + files.add(nextFile); + } + return files; + } + + protected List awaitAllBulkJobCompletions() { + return awaitAllBulkJobCompletions(BatchJobsConfig.BULK_IMPORT_JOB_NAME); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImplTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImplTest.java new file mode 100644 index 00000000000..5bc80f28024 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImplTest.java @@ -0,0 +1,145 @@ +package ca.uhn.fhir.jpa.bulk.imprt.svc; + +import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; +import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; +import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao; +import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao; +import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; +import 
ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; +import com.google.common.collect.Lists; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.blankString; +import static org.hamcrest.Matchers.not; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class BulkDataImportSvcImplTest extends BaseJpaR4Test { + + @Autowired + private IBulkDataImportSvc mySvc; + @Autowired + private IBulkImportJobDao myBulkImportJobDao; + @Autowired + private IBulkImportJobFileDao myBulkImportJobFileDao; + + @Test + public void testCreateNewJob() { + + // Create job + BulkImportJobJson job = new BulkImportJobJson(); + job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + job.setBatchSize(3); + BulkImportJobFileJson file1 = new BulkImportJobFileJson(); + file1.setContents("contents 1"); + BulkImportJobFileJson file2 = new BulkImportJobFileJson(); + file2.setContents("contents 2"); + String jobId = mySvc.createNewJob(job, Lists.newArrayList(file1, file2)); + assertThat(jobId, not(blankString())); + + // Add file + BulkImportJobFileJson file3 = new BulkImportJobFileJson(); + file3.setContents("contents 3"); + mySvc.addFilesToJob(jobId, Lists.newArrayList(file3)); + + runInTransaction(() -> { + List jobs = myBulkImportJobDao.findAll(); + assertEquals(1, jobs.size()); + assertEquals(jobId, jobs.get(0).getJobId()); + assertEquals(3, jobs.get(0).getFileCount()); + assertEquals(BulkImportJobStatusEnum.STAGING, jobs.get(0).getStatus()); + + List files = myBulkImportJobFileDao.findAllForJob(jobId); + assertEquals(3, files.size()); + + }); + } + + @Test + public void testCreateNewJob_InvalidJob_NoContents() { + BulkImportJobJson job = new BulkImportJobJson(); + 
job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + job.setBatchSize(3); + BulkImportJobFileJson file1 = new BulkImportJobFileJson(); + try { + mySvc.createNewJob(job, Lists.newArrayList(file1)); + } catch (UnprocessableEntityException e) { + assertEquals("Job File Contents mode must not be null", e.getMessage()); + } + } + + @Test + public void testCreateNewJob_InvalidJob_NoProcessingMode() { + BulkImportJobJson job = new BulkImportJobJson(); + job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + job.setBatchSize(3); + BulkImportJobFileJson file1 = new BulkImportJobFileJson(); + file1.setContents("contents 1"); + try { + mySvc.createNewJob(job, Lists.newArrayList(file1)); + } catch (UnprocessableEntityException e) { + assertEquals("Job File Processing mode must not be null", e.getMessage()); + } + } + + @Test + public void testAddFilesToJob_InvalidId() { + BulkImportJobFileJson file3 = new BulkImportJobFileJson(); + file3.setContents("contents 3"); + try { + mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3)); + } catch (InvalidRequestException e) { + assertEquals("Unknown job ID: ABCDEFG", e.getMessage()); + } + } + + @Test + public void testAddFilesToJob_WrongStatus() { + runInTransaction(() -> { + BulkImportJobEntity entity = new BulkImportJobEntity(); + entity.setFileCount(1); + entity.setJobId("ABCDEFG"); + entity.setStatus(BulkImportJobStatusEnum.RUNNING); + entity.setRowProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + myBulkImportJobDao.save(entity); + }); + + BulkImportJobFileJson file3 = new BulkImportJobFileJson(); + file3.setContents("contents 3"); + try { + mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3)); + } catch (InvalidRequestException e) { + assertEquals("Job ABCDEFG has status RUNNING and can not be added to", e.getMessage()); + } + } + + @Test + public void testActivateJob() { + runInTransaction(() -> { + BulkImportJobEntity entity = new BulkImportJobEntity(); + 
entity.setFileCount(1); + entity.setJobId("ABCDEFG"); + entity.setStatus(BulkImportJobStatusEnum.STAGING); + entity.setRowProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); + myBulkImportJobDao.save(entity); + }); + + mySvc.markJobAsReadyForActivation("ABCDEFG"); + + runInTransaction(() -> { + List jobs = myBulkImportJobDao.findAll(); + assertEquals(1, jobs.size()); + assertEquals(BulkImportJobStatusEnum.READY, jobs.get(0).getStatus()); + }); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java index 8f22d6dda97..83c339af0bf 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java @@ -9,7 +9,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.BaseConfig; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.entity.TermConcept; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java index 63f61b740b1..301f5476e7a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/TransactionProcessorTest.java @@ -85,7 +85,7 @@ public class TransactionProcessorTest { .setUrl("/MedicationKnowledge"); try { - myTransactionProcessor.transaction(null, input); + myTransactionProcessor.transaction(null, input, false); fail(); } catch (InvalidRequestException e) { assertEquals("Resource 
MedicationKnowledge is not supported on this server. Supported resource types: []", e.getMessage()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java index 483e0874f02..5992f9df117 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java @@ -8,7 +8,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoSubscription; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestDstu2Config; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java index 9cd366f3b1e..e4803f648d5 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java @@ -785,7 +785,7 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest { // try { Bundle resp = mySystemDao.transaction(mySrd, request); assertEquals(1, resp.getEntry().size()); - assertEquals("404 Not Found", resp.getEntry().get(0).getResponse().getStatus()); + assertEquals("204 No Content", resp.getEntry().get(0).getResponse().getStatus()); // fail(); // } catch (ResourceNotFoundException e) { @@ -1159,11 +1159,7 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest { } assertEquals("201 Created", 
resp.getEntry().get(2).getResponse().getStatus()); assertThat(resp.getEntry().get(2).getResponse().getLocation(), startsWith("Patient/")); - if (pass == 0) { - assertEquals("404 Not Found", resp.getEntry().get(3).getResponse().getStatus()); - } else { - assertEquals("204 No Content", resp.getEntry().get(3).getResponse().getStatus()); - } + assertEquals("204 No Content", resp.getEntry().get(3).getResponse().getStatus()); Bundle respGetBundle = (Bundle) resp.getEntry().get(0).getResource(); assertEquals(1, respGetBundle.getEntry().size()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java index 109a59cdad0..0953336ff66 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java @@ -13,7 +13,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestDstu3Config; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java index bf257e1f5ed..9cb19833adf 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java @@ -17,7 +17,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import 
ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4Config; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; @@ -51,24 +51,19 @@ import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; -import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.entity.TermCodeSystem; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; -import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.packages.IPackageInstallerSvc; import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; -import ca.uhn.fhir.jpa.provider.r4.BaseJpaResourceProviderObservationR4; import ca.uhn.fhir.jpa.provider.r4.JpaSystemProviderR4; -import ca.uhn.fhir.jpa.rp.r4.ObservationResourceProvider; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; @@ -77,7 +72,6 @@ import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl; import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl; import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl; import 
ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl; -import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; @@ -95,11 +89,9 @@ import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; import ca.uhn.fhir.test.utilities.ITestDataBuilder; import ca.uhn.fhir.util.ClasspathUtil; -import ca.uhn.fhir.util.ResourceUtil; import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.ValidationResult; -import org.apache.commons.io.IOUtils; import org.hibernate.search.mapper.orm.Search; import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport; @@ -168,7 +160,6 @@ import org.hl7.fhir.r4.model.ValueSet; import org.hl7.fhir.r5.utils.IResourceValidator; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; @@ -182,7 +173,6 @@ import org.springframework.transaction.PlatformTransactionManager; import javax.persistence.EntityManager; import java.io.IOException; -import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java index 7e71ceab8ad..79904f2d15f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java @@ -39,7 +39,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java index 4f0eb913e03..d01756a7265 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java @@ -7,7 +7,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4WithLuceneDisabledConfig; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java index c5ba2df8890..c4b556bc6ed 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyElasticsearchIT.java @@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java index 24951ce3060..a7d4f7a5278 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java @@ -18,6 +18,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; @@ -60,10 +61,10 @@ import org.hl7.fhir.r4.model.Quantity; import org.hl7.fhir.r4.model.Reference; import org.hl7.fhir.r4.model.Resource; import org.hl7.fhir.r4.model.ValueSet; -import org.junit.jupiter.api.Test; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import 
org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; @@ -109,8 +110,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { public void after() { myDaoConfig.setAllowInlineMatchUrlReferences(false); myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete()); - myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED); - } + myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED); + } @BeforeEach public void beforeDisableResultReuse() { @@ -549,7 +550,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { myValueSetDao.create(vs, mySrd); sleepUntilTimeChanges(); - + ResourceTable entity = new TransactionTemplate(myTxManager).execute(t -> myEntityManager.find(ResourceTable.class, id.getIdPartAsLong())); assertEquals(Long.valueOf(1), entity.getIndexStatus()); @@ -568,9 +569,9 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { * so it indexes the newest resource one more time. It wouldn't be a big deal * if this ever got fixed so that it ends up with 2 instead of 3. 
*/ - runInTransaction(()->{ + runInTransaction(() -> { Optional reindexCount = myResourceReindexJobDao.getReindexCount(jobId); - assertEquals(3, reindexCount.orElseThrow(()->new NullPointerException("No job " + jobId)).intValue()); + assertEquals(3, reindexCount.orElseThrow(() -> new NullPointerException("No job " + jobId)).intValue()); }); // Try making the resource unparseable @@ -626,7 +627,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { searchParamMap.add(Patient.SP_FAMILY, new StringParam("family2")); assertEquals(1, myPatientDao.search(searchParamMap).size().intValue()); - runInTransaction(()->{ + runInTransaction(() -> { ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 3); assertNotNull(historyEntry); myResourceHistoryTableDao.delete(historyEntry); @@ -656,7 +657,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { searchParamMap.add(Patient.SP_FAMILY, new StringParam("family1")); assertEquals(1, myPatientDao.search(searchParamMap).size().intValue()); - runInTransaction(()->{ + runInTransaction(() -> { myEntityManager .createQuery("UPDATE ResourceIndexedSearchParamString s SET s.myHashNormalizedPrefix = 0") .executeUpdate(); @@ -671,7 +672,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { myResourceReindexingSvc.markAllResourcesForReindexing(); myResourceReindexingSvc.forceReindexingPass(); - runInTransaction(()->{ + runInTransaction(() -> { ResourceIndexedSearchParamString param = myResourceIndexedSearchParamStringDao.findAll() .stream() .filter(t -> t.getParamName().equals("family")) @@ -694,7 +695,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { searchParamMap.add(Patient.SP_FAMILY, new StringParam("family1")); assertEquals(1, myPatientDao.search(searchParamMap).size().intValue()); - runInTransaction(()->{ + runInTransaction(() -> { Long i = myEntityManager .createQuery("SELECT count(s) FROM 
ResourceIndexedSearchParamString s WHERE s.myHashIdentity = 0", Long.class) .getSingleResult(); @@ -714,7 +715,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { myResourceReindexingSvc.markAllResourcesForReindexing(); myResourceReindexingSvc.forceReindexingPass(); - runInTransaction(()->{ + runInTransaction(() -> { Long i = myEntityManager .createQuery("SELECT count(s) FROM ResourceIndexedSearchParamString s WHERE s.myHashIdentity = 0", Long.class) .getSingleResult(); @@ -808,6 +809,30 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { assertEquals("201 Created", resp.getEntry().get(0).getResponse().getStatus()); } + + @Test + public void testNestedTransaction_ReadsBlocked() { + String methodName = "testTransactionBatchWithFailingRead"; + Bundle request = new Bundle(); + request.setType(BundleType.TRANSACTION); + + Patient p = new Patient(); + p.addName().setFamily(methodName); + request.addEntry().setResource(p).getRequest().setMethod(HTTPVerb.POST); + + request.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl("Patient?identifier=foo"); + + try { + runInTransaction(()->{ + mySystemDao.transactionNested(mySrd, request); + }); + fail(); + } catch (InvalidRequestException e) { + assertEquals("Can not invoke read operation on nested transaction", e.getMessage()); + } + } + + @Test public void testTransactionBatchWithFailingRead() { String methodName = "testTransactionBatchWithFailingRead"; @@ -923,8 +948,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { Bundle outcome = mySystemDao.transaction(mySrd, request); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); assertEquals("400 Bad Request", outcome.getEntry().get(0).getResponse().getStatus()); - assertEquals(IssueSeverity.ERROR, ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity()); - assertEquals("Missing required resource in Bundle.entry[0].resource for operation 
POST", ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics()); + assertEquals(IssueSeverity.ERROR, ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity()); + assertEquals("Missing required resource in Bundle.entry[0].resource for operation POST", ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics()); validate(outcome); } @@ -942,8 +967,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { Bundle outcome = mySystemDao.transaction(mySrd, request); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); assertEquals("400 Bad Request", outcome.getEntry().get(0).getResponse().getStatus()); - assertEquals(IssueSeverity.ERROR, ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity()); - assertEquals("Missing required resource in Bundle.entry[0].resource for operation PUT", ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics()); + assertEquals(IssueSeverity.ERROR, ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity()); + assertEquals("Missing required resource in Bundle.entry[0].resource for operation PUT", ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics()); validate(outcome); } @@ -2272,7 +2297,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { patient2.addIdentifier().setSystem("urn:system").setValue("testPersistWithSimpleLinkP02"); request.addEntry().setResource(patient2).getRequest().setMethod(HTTPVerb.POST); - assertThrows(InvalidRequestException.class, ()->{ + assertThrows(InvalidRequestException.class, () -> { mySystemDao.transaction(mySrd, request); }); } @@ -3198,9 +3223,9 @@ public class FhirSystemDaoR4Test extends 
BaseJpaR4SystemTest { assertEquals("1", id2.getVersionIdPart()); assertEquals(id.getValue(), id2.getValue()); - + } - + @Test public void testTransactionWithIfMatch() { Patient p = new Patient(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java index 8d40288bc2b..b9192bf7b0f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java @@ -16,7 +16,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR5Config; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java index 7680e88baec..66ac0d415cb 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java @@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; -import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch; import ca.uhn.fhir.jpa.dao.BaseJpaTest; import 
ca.uhn.fhir.jpa.dao.data.IResourceTableDao; diff --git a/hapi-fhir-jpaserver-batch/pom.xml b/hapi-fhir-jpaserver-batch/pom.xml index b4436c319ec..a08e196fe26 100644 --- a/hapi-fhir-jpaserver-batch/pom.xml +++ b/hapi-fhir-jpaserver-batch/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/BatchConstants.java b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/BatchConstants.java new file mode 100644 index 00000000000..4224e215332 --- /dev/null +++ b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/BatchConstants.java @@ -0,0 +1,32 @@ +package ca.uhn.fhir.jpa.batch; + +/*- + * #%L + * HAPI FHIR JPA Server - Batch Task Processor + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public class BatchConstants { + + /** + * Non instantiable + */ + private BatchConstants() {} + + public static final String JOB_LAUNCHING_TASK_EXECUTOR = "jobLaunchingTaskExecutor"; + +} diff --git a/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java index 936eb9d12ab..27eb2518893 100644 --- a/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java +++ b/hapi-fhir-jpaserver-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/NonPersistedBatchConfigurer.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.batch.config; * #L% */ +import ca.uhn.fhir.jpa.batch.BatchConstants; import org.springframework.batch.core.configuration.annotation.DefaultBatchConfigurer; import org.springframework.batch.core.explore.JobExplorer; import org.springframework.batch.core.explore.support.MapJobExplorerFactoryBean; @@ -39,7 +40,7 @@ public class NonPersistedBatchConfigurer extends DefaultBatchConfigurer { private PlatformTransactionManager myHapiPlatformTransactionManager; @Autowired - @Qualifier("jobLaunchingTaskExecutor") + @Qualifier(BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR) private TaskExecutor myTaskExecutor; private MapJobRepositoryFactoryBean myJobRepositoryFactory; diff --git a/hapi-fhir-jpaserver-cql/pom.xml b/hapi-fhir-jpaserver-cql/pom.xml index 440cd4e30e4..20069c8fa7e 100644 --- a/hapi-fhir-jpaserver-cql/pom.xml +++ b/hapi-fhir-jpaserver-cql/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -144,13 +144,13 @@ ca.uhn.hapi.fhir hapi-fhir-test-utilities - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT test ca.uhn.hapi.fhir hapi-fhir-jpaserver-test-utilities - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT test diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml 
index 236fc33d3ab..3295a49c94a 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -55,13 +55,13 @@ ca.uhn.hapi.fhir hapi-fhir-test-utilities - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT test ca.uhn.hapi.fhir hapi-fhir-jpaserver-test-utilities - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT test diff --git a/hapi-fhir-jpaserver-migrate/pom.xml b/hapi-fhir-jpaserver-migrate/pom.xml index ab2347ac99c..68316dcf0db 100644 --- a/hapi-fhir-jpaserver-migrate/pom.xml +++ b/hapi-fhir-jpaserver-migrate/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index 4e14b8594d7..e3975350931 100644 --- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.tasks; */ import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask; import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask; @@ -91,6 +92,32 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.1", "INDEX_STATUS").nullable().type(ColumnTypeEnum.LONG); version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.2", "SOURCE_DIRECT_PARENT_PIDS").nullable().type(ColumnTypeEnum.CLOB); version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.3", 
"SOURCE_PID").nullable().type(ColumnTypeEnum.LONG); + + // Bulk Import Job + Builder.BuilderAddTableByColumns blkImportJobTable = version.addTableByColumns("20210410.1", "HFJ_BLK_IMPORT_JOB", "PID"); + blkImportJobTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); + blkImportJobTable.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, Search.UUID_COLUMN_LENGTH); + blkImportJobTable.addColumn("JOB_STATUS").nonNullable().type(ColumnTypeEnum.STRING, 10); + blkImportJobTable.addColumn("STATUS_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); + blkImportJobTable.addColumn("STATUS_MESSAGE").nullable().type(ColumnTypeEnum.STRING, 500); + blkImportJobTable.addColumn("OPTLOCK").nonNullable().type(ColumnTypeEnum.INT); + blkImportJobTable.addColumn("FILE_COUNT").nonNullable().type(ColumnTypeEnum.INT); + blkImportJobTable.addColumn("ROW_PROCESSING_MODE").nonNullable().type(ColumnTypeEnum.STRING, 20); + blkImportJobTable.addColumn("BATCH_SIZE").nonNullable().type(ColumnTypeEnum.INT); + blkImportJobTable.addIndex("20210410.2", "IDX_BLKIM_JOB_ID").unique(true).withColumns("JOB_ID"); + version.addIdGenerator("20210410.3", "SEQ_BLKIMJOB_PID"); + + // Bulk Import Job File + Builder.BuilderAddTableByColumns blkImportJobFileTable = version.addTableByColumns("20210410.4", "HFJ_BLK_IMPORT_JOBFILE", "PID"); + blkImportJobFileTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); + blkImportJobFileTable.addColumn("JOB_PID").nonNullable().type(ColumnTypeEnum.LONG); + blkImportJobFileTable.addColumn("JOB_CONTENTS").nonNullable().type(ColumnTypeEnum.BLOB); + blkImportJobFileTable.addColumn("FILE_SEQ").nonNullable().type(ColumnTypeEnum.INT); + blkImportJobFileTable.addColumn("TENANT_NAME").nullable().type(ColumnTypeEnum.STRING, 200); + blkImportJobFileTable.addIndex("20210410.5", "IDX_BLKIM_JOBFILE_JOBID").unique(false).withColumns("JOB_PID"); + blkImportJobFileTable.addForeignKey("20210410.6", 
"FK_BLKIMJOBFILE_JOB").toColumn("JOB_PID").references("HFJ_BLK_IMPORT_JOB", "PID"); + version.addIdGenerator("20210410.7", "SEQ_BLKIMJOBFILE_PID"); + } private void init530() { diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index 8c915ff27c6..b46518bed86 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 0cccf93a45b..5dc3b7d17f4 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index 633843691b3..fc862aea074 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index e00e23537da..b66f8e9615e 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index ffbe7a1164c..183e9a6b602 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml @@ -164,7 +164,7 @@ ca.uhn.hapi.fhir hapi-fhir-converter - 5.4.0-PRE5-SNAPSHOT + 
5.4.0-PRE6-SNAPSHOT diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java index e2c52f3b321..a7e2c06d36a 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java @@ -6,7 +6,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; -import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider; +import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; import ca.uhn.fhir.jpa.provider.DiffProvider; import ca.uhn.fhir.jpa.provider.GraphQLProvider; diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 6f7664a8f48..adc4be9c0ab 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index 2524d2960e8..1e71fc77421 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 58483804fd3..d0905db3a66 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index bb0460f5f2c..919e895664c 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index ec97e7f6b0f..622de27723a 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT hapi-fhir-spring-boot-sample-client-okhttp diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index b20cdc12fac..ad0f6ed0cfb 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT hapi-fhir-spring-boot-sample-server-jersey diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml 
b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index 1643433e6ef..02e9d448d79 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT hapi-fhir-spring-boot-samples diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index 04d4fe39150..c210826be2c 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index b3490ba702b..38c1789454d 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index d80fff6935b..c8054db0e2b 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 412d69d642c..6659bcd46c2 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java index 
d1acfeb7369..0bb1e0e3f31 100644 --- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java +++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGeneratorDstu2Test.java @@ -1,7 +1,5 @@ package ca.uhn.fhir.narrative; -import ca.uhn.fhir.util.TestUtil; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -9,60 +7,54 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class BaseThymeleafNarrativeGeneratorDstu2Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseThymeleafNarrativeGeneratorDstu2Test.class); - @AfterAll - public static void afterClassClearContext() { - TestUtil.clearAllStaticFieldsForUnitTest(); - } - - @Test public void testTrimWhitespace() { //@formatter:off - String input = "

\n" + - "
\n" + - " \n" + - " joe \n" + - " john \n" + - " BLOW \n" + - " \n" + - "
\n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - "
Identifier123456
Address\n" + - " \n" + - " 123 Fake Street
\n" + - " \n" + - " \n" + - " Unit 1
\n" + - " \n" + - " Toronto\n" + - " ON\n" + - " Canada\n" + - "
Date of birth\n" + - " 31 March 2014\n" + - "
\n" + - "
"; + String input = "
\n" + + "
\n" + + " \n" + + " joe \n" + + " john \n" + + " BLOW \n" + + " \n" + + "
\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + "
Identifier123456
Address\n" + + " \n" + + " 123 Fake Street
\n" + + " \n" + + " \n" + + " Unit 1
\n" + + " \n" + + " Toronto\n" + + " ON\n" + + " Canada\n" + + "
Date of birth\n" + + " 31 March 2014\n" + + "
\n" + + "
"; //@formatter:on String actual = BaseThymeleafNarrativeGenerator.cleanWhitespace(input); String expected = "
joe john BLOW
Identifier123456
Address123 Fake Street
Unit 1
TorontoONCanada
Date of birth31 March 2014
"; - + ourLog.info(actual); - + assertEquals(expected, actual); } diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java index c6709a2128b..a3742c8ea60 100644 --- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java +++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorDstu2Test.java @@ -1,9 +1,9 @@ package ca.uhn.fhir.narrative; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.model.dstu2.resource.Practitioner; -import ca.uhn.fhir.util.TestUtil; -import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import static org.hamcrest.MatcherAssert.assertThat; @@ -13,20 +13,19 @@ public class CustomThymeleafNarrativeGeneratorDstu2Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CustomThymeleafNarrativeGeneratorDstu2Test.class); - private static FhirContext ourCtx = FhirContext.forDstu2(); + private FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.DSTU2); - @AfterAll - public static void afterClassClearContext() { - TestUtil.clearAllStaticFieldsForUnitTest(); + @AfterEach + public void after() { + myCtx.setNarrativeGenerator(null); } - @Test public void testGenerator() { // CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("file:src/test/resources/narrative/customnarrative.properties"); CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/customnarrative_dstu2.properties"); - ourCtx.setNarrativeGenerator(gen); + myCtx.setNarrativeGenerator(gen); Practitioner p = new Practitioner(); p.addIdentifier().setSystem("sys").setValue("val1"); @@ -34,7 +33,7 @@ 
public class CustomThymeleafNarrativeGeneratorDstu2Test { p.addAddress().addLine("line1").addLine("line2"); p.getName().addFamily("fam1").addGiven("given"); - gen.populateResourceNarrative(ourCtx, p); + gen.populateResourceNarrative(myCtx, p); String actual = p.getText().getDiv().getValueAsString(); ourLog.info(actual); diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java index 4b5015d9f5b..58beb7f4a0b 100644 --- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java +++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu2Test.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.narrative; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.model.api.TemporalPrecisionEnum; import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt; import ca.uhn.fhir.model.dstu2.composite.QuantityDt; @@ -22,6 +23,7 @@ import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.util.TestUtil; import org.hamcrest.core.StringContains; import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -35,7 +37,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class DefaultThymeleafNarrativeGeneratorDstu2Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultThymeleafNarrativeGeneratorDstu2Test.class); - private static FhirContext ourCtx = FhirContext.forDstu2(); + private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.DSTU2); private DefaultThymeleafNarrativeGenerator myGen; @BeforeEach @@ -43,9 +45,15 @@ public class 
DefaultThymeleafNarrativeGeneratorDstu2Test { myGen = new DefaultThymeleafNarrativeGenerator(); myGen.setUseHapiServerConformanceNarrative(true); - ourCtx.setNarrativeGenerator(myGen); + myCtx.setNarrativeGenerator(myGen); } + @AfterEach + public void after() { + myCtx.setNarrativeGenerator(null); + } + + @Test public void testGeneratePatient() throws DataFormatException { Patient value = new Patient(); @@ -57,7 +65,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { value.setBirthDate(new Date(), TemporalPrecisionEnum.DAY); - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); assertThat(output, StringContains.containsString("
joe john BLOW
")); @@ -69,7 +77,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { Parameters value = new Parameters(); value.setId("123"); - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); assertThat(output, not(containsString("narrative"))); @@ -89,9 +97,9 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { " \n" + ""; - OperationOutcome oo = ourCtx.newXmlParser().parseResource(OperationOutcome.class, parse); + OperationOutcome oo = myCtx.newXmlParser().parseResource(OperationOutcome.class, parse); - myGen.populateResourceNarrative(ourCtx, oo); + myGen.populateResourceNarrative(myCtx, oo); String output = oo.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -129,7 +137,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { value.addResult().setResource(obs); } - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -137,7 +145,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { // Now try it with the parser - output = ourCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(value); + output = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(value); ourLog.info(output); assertThat(output, StringContains.containsString("
Some & Diagnostic Report
")); } @@ -154,7 +162,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { mp.setStatus(MedicationOrderStatusEnum.ACTIVE); mp.setDateWritten(new DateTimeDt("2014-09-01")); - myGen.populateResourceNarrative(ourCtx, mp); + myGen.populateResourceNarrative(myCtx, mp); String output = mp.getText().getDiv().getValueAsString(); assertTrue(output.contains("ciprofloaxin"), "Expected medication name of ciprofloaxin within narrative: " + output); @@ -167,7 +175,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu2Test { Medication med = new Medication(); med.getCode().setText("ciproflaxin"); - myGen.populateResourceNarrative(ourCtx, med); + myGen.populateResourceNarrative(myCtx, med); String output = med.getText().getDiv().getValueAsString(); assertThat(output, containsString("ciproflaxin")); diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index 24f6bfd53d9..04fc5a8cbc3 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java index 9d99461d525..c5dceef79ac 100644 --- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java +++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorDstu3Test.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.narrative; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.util.TestUtil; import org.apache.commons.collections.Transformer; @@ -11,6 +12,7 @@ import 
org.hl7.fhir.dstu3.model.DiagnosticReport.DiagnosticReportStatus; import org.hl7.fhir.dstu3.model.MedicationRequest.MedicationRequestStatus; import org.hl7.fhir.dstu3.model.Observation.ObservationStatus; import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -28,7 +30,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class DefaultThymeleafNarrativeGeneratorDstu3Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultThymeleafNarrativeGeneratorDstu3Test.class); - private static FhirContext ourCtx = FhirContext.forDstu3(); + private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.DSTU3); private DefaultThymeleafNarrativeGenerator myGen; @BeforeEach @@ -36,9 +38,15 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { myGen = new DefaultThymeleafNarrativeGenerator(); myGen.setUseHapiServerConformanceNarrative(true); - ourCtx.setNarrativeGenerator(myGen); + myCtx.setNarrativeGenerator(myGen); } + @AfterEach + public void after() { + myCtx.setNarrativeGenerator(null); + } + + @Test public void testGeneratePatient() throws DataFormatException { Patient value = new Patient(); @@ -51,7 +59,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { value.setBirthDate(new Date()); - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); assertThat(output, StringContains.containsString("
joe john BLOW
")); @@ -95,7 +103,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { } }); - customGen.populateResourceNarrative(ourCtx, value); + customGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); assertThat(output, StringContains.containsString("Some beautiful proze")); @@ -111,7 +119,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { value.addResult().setReference("Observation/2"); value.addResult().setReference("Observation/3"); - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -133,13 +141,13 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { ""; //@formatter:on - OperationOutcome oo = ourCtx.newXmlParser().parseResource(OperationOutcome.class, parse); + OperationOutcome oo = myCtx.newXmlParser().parseResource(OperationOutcome.class, parse); // String output = gen.generateTitle(oo); // ourLog.info(output); // assertEquals("Operation Outcome (2 issues)", output); - myGen.populateResourceNarrative(ourCtx, oo); + myGen.populateResourceNarrative(myCtx, oo); String output = oo.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -177,7 +185,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { value.addResult().setResource(obs); } - myGen.populateResourceNarrative(ourCtx, value); + myGen.populateResourceNarrative(myCtx, value); String output = value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -240,8 +248,8 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { " }"; - DiagnosticReport value = ourCtx.newJsonParser().parseResource(DiagnosticReport.class, input); - myGen.populateResourceNarrative(ourCtx, value); + DiagnosticReport value = myCtx.newJsonParser().parseResource(DiagnosticReport.class, input); + myGen.populateResourceNarrative(myCtx, value); String output = 
value.getText().getDiv().getValueAsString(); ourLog.info(output); @@ -261,7 +269,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { mp.setStatus(MedicationRequestStatus.ACTIVE); mp.setAuthoredOnElement(new DateTimeType("2014-09-01")); - myGen.populateResourceNarrative(ourCtx, mp); + myGen.populateResourceNarrative(myCtx, mp); String output = mp.getText().getDiv().getValueAsString(); assertTrue(output.contains("ciprofloaxin"), "Expected medication name of ciprofloaxin within narrative: " + output); @@ -274,7 +282,7 @@ public class DefaultThymeleafNarrativeGeneratorDstu3Test { Medication med = new Medication(); med.getCode().setText("ciproflaxin"); - myGen.populateResourceNarrative(ourCtx, med); + myGen.populateResourceNarrative(myCtx, med); String output = med.getText().getDiv().getValueAsString(); assertThat(output, containsString("ciproflaxin")); diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index 035dabec631..8383bdda70a 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index 67dd6dfeb59..e0793555c76 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java index 6b355c7332e..957551d03c5 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java +++ 
b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGeneratorR4Test.java @@ -1,11 +1,13 @@ package ca.uhn.fhir.narrative; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.util.TestUtil; import org.hl7.fhir.r4.model.Practitioner; import org.hl7.fhir.r4.model.Quantity; import org.hl7.fhir.r4.model.StringType; import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import static org.hamcrest.MatcherAssert.assertThat; @@ -14,86 +16,89 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class CustomThymeleafNarrativeGeneratorR4Test { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CustomThymeleafNarrativeGeneratorR4Test.class); + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CustomThymeleafNarrativeGeneratorR4Test.class); - /** Don't use cached here since we modify the context */ - private FhirContext myCtx = FhirContext.forR4(); + /** + * Don't use cached here since we modify the context + */ + private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); - /** - * Implement narrative for standard type - */ - @Test - public void testStandardType() { + @AfterEach + public void after() { + myCtx.setNarrativeGenerator(null); + } - CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/standardtypes_r4.properties"); - myCtx.setNarrativeGenerator(gen); + /** + * Implement narrative for standard type + */ + @Test + public void testStandardType() { - Practitioner p = new Practitioner(); - p.addIdentifier().setSystem("sys").setValue("val1"); - p.addIdentifier().setSystem("sys").setValue("val2"); - p.addAddress().addLine("line1").addLine("line2"); - p.addName().setFamily("fam1").addGiven("given"); + CustomThymeleafNarrativeGenerator gen = new 
CustomThymeleafNarrativeGenerator("classpath:narrative/standardtypes_r4.properties"); + myCtx.setNarrativeGenerator(gen); - gen.populateResourceNarrative(myCtx, p); + Practitioner p = new Practitioner(); + p.addIdentifier().setSystem("sys").setValue("val1"); + p.addIdentifier().setSystem("sys").setValue("val2"); + p.addAddress().addLine("line1").addLine("line2"); + p.addName().setFamily("fam1").addGiven("given"); - String actual = p.getText().getDiv().getValueAsString(); - ourLog.info(actual); + gen.populateResourceNarrative(myCtx, p); - assertThat(actual, containsString("

Name

given FAM1

Address

line1
line2
")); + String actual = p.getText().getDiv().getValueAsString(); + ourLog.info(actual); - } + assertThat(actual, containsString("

Name

given FAM1

Address

line1
line2
")); - @Test - public void testCustomType() { + } - CustomPatient patient = new CustomPatient(); - patient.setActive(true); - FavouritePizzaExtension parentExtension = new FavouritePizzaExtension(); - parentExtension.setToppings(new StringType("Mushrooms, Onions")); - parentExtension.setSize(new Quantity(null, 14, "http://unitsofmeasure", "[in_i]", "Inches")); - patient.setFavouritePizza(parentExtension); + @Test + public void testCustomType() { - String output = myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient); - ourLog.info("Encoded: {}", output); + CustomPatient patient = new CustomPatient(); + patient.setActive(true); + FavouritePizzaExtension parentExtension = new FavouritePizzaExtension(); + parentExtension.setToppings(new StringType("Mushrooms, Onions")); + parentExtension.setSize(new Quantity(null, 14, "http://unitsofmeasure", "[in_i]", "Inches")); + patient.setFavouritePizza(parentExtension); - String expectedEncoding = "{\n" + - " \"resourceType\": \"Patient\",\n" + - " \"meta\": {\n" + - " \"profile\": [ \"http://custom_patient\" ]\n" + - " },\n" + - " \"extension\": [ {\n" + - " \"url\": \"http://example.com/favourite_pizza\",\n" + - " \"extension\": [ {\n" + - " \"url\": \"toppings\",\n" + - " \"valueString\": \"Mushrooms, Onions\"\n" + - " }, {\n" + - " \"url\": \"size\",\n" + - " \"valueQuantity\": {\n" + - " \"value\": 14,\n" + - " \"unit\": \"Inches\",\n" + - " \"system\": \"http://unitsofmeasure\",\n" + - " \"code\": \"[in_i]\"\n" + - " }\n" + - " } ]\n" + - " } ],\n" + - " \"active\": true\n" + - "}"; - assertEquals(expectedEncoding, output); + String output = myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient); + ourLog.info("Encoded: {}", output); - CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/customtypes_r4.properties"); - myCtx.setNarrativeGenerator(gen); - gen.populateResourceNarrative(myCtx, patient); + String expectedEncoding = "{\n" + + 
" \"resourceType\": \"Patient\",\n" + + " \"meta\": {\n" + + " \"profile\": [ \"http://custom_patient\" ]\n" + + " },\n" + + " \"extension\": [ {\n" + + " \"url\": \"http://example.com/favourite_pizza\",\n" + + " \"extension\": [ {\n" + + " \"url\": \"toppings\",\n" + + " \"valueString\": \"Mushrooms, Onions\"\n" + + " }, {\n" + + " \"url\": \"size\",\n" + + " \"valueQuantity\": {\n" + + " \"value\": 14,\n" + + " \"unit\": \"Inches\",\n" + + " \"system\": \"http://unitsofmeasure\",\n" + + " \"code\": \"[in_i]\"\n" + + " }\n" + + " } ]\n" + + " } ],\n" + + " \"active\": true\n" + + "}"; + assertEquals(expectedEncoding, output); - String actual = patient.getText().getDiv().getValueAsString(); - ourLog.info(actual); + CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator("classpath:narrative/customtypes_r4.properties"); + myCtx.setNarrativeGenerator(gen); + gen.populateResourceNarrative(myCtx, patient); - String expected = "

CustomPatient

Favourite Pizza

Toppings: Mushrooms, Onions Size: 14
"; - assertEquals(expected, actual); + String actual = patient.getText().getDiv().getValueAsString(); + ourLog.info(actual); - } + String expected = "

CustomPatient

Favourite Pizza

Toppings: Mushrooms, Onions Size: 14
"; + assertEquals(expected, actual); + + } - @AfterAll - public static void afterClassClearContext() { - TestUtil.clearAllStaticFieldsForUnitTest(); - } } diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java index 433b448a448..4847d922123 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGeneratorR4Test.java @@ -10,6 +10,7 @@ import org.hl7.fhir.r4.model.DiagnosticReport.DiagnosticReportStatus; import org.hl7.fhir.r4.model.MedicationRequest.MedicationRequestStatus; import org.hl7.fhir.r4.model.Observation.ObservationStatus; import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -22,7 +23,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class DefaultThymeleafNarrativeGeneratorR4Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultThymeleafNarrativeGeneratorR4Test.class); - private FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); + private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); private DefaultThymeleafNarrativeGenerator myGen; @BeforeEach @@ -33,6 +34,11 @@ public class DefaultThymeleafNarrativeGeneratorR4Test { myCtx.setNarrativeGenerator(myGen); } + @AfterEach + public void after() { + myCtx.setNarrativeGenerator(null); + } + @Test public void testGeneratePatient() throws DataFormatException { Patient value = new Patient(); diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 8760ce684e3..f564ed044b8 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ 
b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index 654c33c28fe..ac4521d10e5 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java index ee503727c3e..61563e7ccb4 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/ITestDataBuilder.java @@ -154,7 +154,7 @@ public interface ITestDataBuilder { } } - default IBaseResource buildResource(String theResourceType, Consumer[] theModifiers) { + default IBaseResource buildResource(String theResourceType, Consumer... 
theModifiers) { IBaseResource resource = getFhirContext().getResourceDefinition(theResourceType).newInstance(); for (Consumer next : theModifiers) { next.accept(resource); diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index 1dfb066f20b..0a140bc05ab 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index 6aae3187f80..4db0f72591a 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index ecdbbd4a635..a3b7462547d 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index 560106316d0..33d67b4e94a 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 68ae7002bdd..5f830ffce69 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index 9ac87f4b79d..dbc5e9359b8 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index b180c3de905..aafc58b15ba 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 5e946ea683b..6daf487888f 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml @@ -58,37 +58,37 @@ ca.uhn.hapi.fhir hapi-fhir-structures-dstu3 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-hl7org-dstu2 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-r4 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-r5 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-dstu2 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-dstu3 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-r4 - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT org.apache.velocity diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index b4a180ec8ca..6b9aa6546e9 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index d53c158c6e0..723188cb297 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ 
ca.uhn.hapi.fhir hapi-fhir pom - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. https://hapifhir.io @@ -761,20 +761,21 @@ 1.2.0 4.2.5 1.2 - 2.7.0 - 1.14 + 3.0.1 + 1.15 1.20 - 1.7 - 2.6 - 3.9 + 1.9 + 2.8.0 + 3.12.0 1.2 1.5.0 10.14.2.0 2.5.1 + 3.9.0 0.7.9 - 30.1-jre - 2.8.5 + 30.1.1-jre + 2.8.6 2.2.11_1 2.3.1 2.3.0.1 @@ -786,17 +787,17 @@ 3.0.2 5.7.0 6.5.4 - 5.4.26.Final - 6.0.0.Final + 5.4.30.Final + 6.0.2.Final 8.7.0 2.2 6.1.5.Final 4.4.13 4.5.13 - 2.12.1 - 2.11.3 - 3.1.0 + 2.12.3 + ${jackson_version} + 3.3.0 1.8 3.8.1 4.0.0.Beta3 @@ -807,15 +808,15 @@ 1.2_5 1.7.30 2.11.1 - 5.3.3 + 5.3.6 - 2.4.2 - 4.2.3.RELEASE + 2.4.7 + 4.3.2 2.4.1 1.2.2.RELEASE 3.1.4 - 3.0.11.RELEASE + 3.0.12.RELEASE 4.4.1 @@ -999,7 +1000,7 @@ org.jetbrains annotations - 19.0.0 + 20.1.0 commons-io @@ -1150,7 +1151,7 @@ org.apache.commons commons-dbcp2 - 2.7.0 + 2.8.0 org.apache.commons @@ -1312,7 +1313,7 @@ com.fasterxml.woodstox woodstox-core - 6.2.3 + 6.2.5 org.ebaysf.web @@ -1398,7 +1399,7 @@ org.fusesource.jansi jansi - 2.1.1 + 2.3.2 org.glassfish @@ -1553,12 +1554,12 @@ org.mockito mockito-core - 3.6.28 + ${mockito_version} org.mockito mockito-junit-jupiter - 3.3.3 + ${mockito_version} org.postgresql @@ -1817,18 +1818,10 @@ true - - com.gemnasium - gemnasium-maven-plugin - 0.2.0 - - github.com/hapifhir/hapi-fhir - - org.basepom.maven duplicate-finder-maven-plugin - 1.4.0 + 1.5.0 de.jpdigital @@ -1889,12 +1882,12 @@ org.apache.maven.plugins maven-javadoc-plugin - 3.1.1 + 3.2.0 org.apache.maven.plugins maven-jar-plugin - 3.1.2 + 3.2.0 org.apache.maven.plugins @@ -1909,7 +1902,7 @@ org.apache.maven.plugins maven-plugin-plugin - 3.5 + 3.6.0 org.apache.maven.plugins @@ -1919,14 +1912,7 @@ org.apache.maven.plugins maven-source-plugin - 3.1.0 - - - org.codehaus.plexus - plexus-utils - 3.1.0 - - + 3.2.1 org.apache.maven.plugins @@ -1948,7 +1934,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.0.0 + 3.2.0 org.codehaus.mojo @@ 
-1981,7 +1967,7 @@ org.codehaus.mojo versions-maven-plugin - 2.7 + 2.8.1 false @@ -2110,7 +2096,7 @@ org.apache.maven.plugins maven-checkstyle-plugin - 3.1.0 + 3.1.2 com.puppycrawl.tools @@ -2143,7 +2129,7 @@ - 3.3.9 + 3.5.4 11 diff --git a/restful-server-example/pom.xml b/restful-server-example/pom.xml index 37c4a874033..7640191ae97 100644 --- a/restful-server-example/pom.xml +++ b/restful-server-example/pom.xml @@ -8,7 +8,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../pom.xml diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index 4e51e91c572..2ef7b674d4b 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index 20a96ef3c9e..85242f389d3 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index 897bcbb0b07..9716634f826 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.4.0-PRE5-SNAPSHOT + 5.4.0-PRE6-SNAPSHOT ../../pom.xml