From ff1c6e4d276c9effd2d83d14238ede68e8f65c15 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Thu, 4 Mar 2021 16:15:51 -0500 Subject: [PATCH 01/22] Start with failling test and partial job config --- .../uhn/fhir/jpa/batch/BatchJobsConfig.java | 1 + .../jpa/bulk/api/BulkDataExportOptions.java | 8 +- .../jpa/bulk/job/BulkExportJobConfig.java | 48 +++++++++-- .../jpa/bulk/svc/BulkDataExportSvcImpl.java | 19 ++++- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 83 +++++++++++++++++-- .../uhn/fhir/jpa/model/util/JpaConstants.java | 5 ++ .../uhn/fhir/jpa/config/TestJpaR4Config.java | 3 +- 7 files changed, 150 insertions(+), 17 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java index 043d9775df1..5cb70419561 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java @@ -33,4 +33,5 @@ import org.springframework.context.annotation.Import; public class BatchJobsConfig { public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob"; public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob"; + public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExport"; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java index 007cd78c97b..93e8aa75ce6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java @@ -8,12 +8,14 @@ public class BulkDataExportOptions { private final Set myResourceTypes; private final Date mySince; private final Set myFilters; + private boolean mySystemLevel; - public BulkDataExportOptions(String theOutputFormat, Set theResourceTypes, Date theSince, Set theFilters) { + public BulkDataExportOptions(String theOutputFormat, Set theResourceTypes, Date theSince, Set theFilters, boolean theSystemLevel) { myOutputFormat = theOutputFormat; myResourceTypes = theResourceTypes; mySince = theSince; myFilters = theFilters; + mySystemLevel = theSystemLevel; } public String getOutputFormat() { @@ -31,4 +33,8 @@ public class BulkDataExportOptions { public Set getFilters() { return myFilters; } + + public boolean isSystemLevel() { + return mySystemLevel; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java index 4db16392b82..31cf3b93e7c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java @@ -92,6 +92,17 @@ public class BulkExportJobConfig { .build(); } + @Bean + @Lazy + public Job patientBulkExportJob() { + return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME) + .validator(bulkJobParameterValidator()) + .start(createBulkExportEntityStep()) + .next(patientPartitionStep()) + .next(closeJobStep()) + .build(); + } + @Bean public GroupIdPresentValidator groupBulkJobParameterValidator() { return new GroupIdPresentValidator(); @@ -122,7 +133,7 @@ public class BulkExportJobConfig { .reader(groupBulkItemReader()) 
 			.processor(myPidToIBaseResourceProcessor)
 			.writer(resourceToFileWriter())
-			.listener(bulkExportGenrateResourceFilesStepListener())
+			.listener(bulkExportGenerateResourceFilesStepListener())
 			.build();
 	}
 
@@ -140,7 +151,7 @@ public class BulkExportJobConfig {
 			.reader(bulkItemReader())
 			.processor(myPidToIBaseResourceProcessor)
 			.writer(resourceToFileWriter())
-			.listener(bulkExportGenrateResourceFilesStepListener())
+			.listener(bulkExportGenerateResourceFilesStepListener())
 			.build();
 	}
 
@@ -165,10 +176,17 @@
 	@Bean
 	@JobScope
-	public BulkExportGenerateResourceFilesStepListener bulkExportGenrateResourceFilesStepListener() {
+	public BulkExportGenerateResourceFilesStepListener bulkExportGenerateResourceFilesStepListener() {
 		return new BulkExportGenerateResourceFilesStepListener();
 	}
 
+	@Bean
+	public Step partitionStep() {
+		return myStepBuilderFactory.get("partitionStep")
+			.partitioner("bulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner())
+			.step(bulkExportGenerateResourceFilesStep())
+			.build();
+	}
 
 	@Bean
 	public Step groupPartitionStep() {
@@ -177,14 +195,32 @@
 			.step(groupBulkExportGenerateResourceFilesStep())
 			.build();
 	}
+
 	@Bean
-	public Step partitionStep() {
+	public Step patientPartitionStep() {
 		return myStepBuilderFactory.get("partitionStep")
-			.partitioner("bulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner())
-			.step(bulkExportGenerateResourceFilesStep())
+			.partitioner("patientBulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner())
+			.step(patientBulkExportGenerateResourceFilesStep())
 			.build();
 	}
 
+	@Bean
+	public Step patientBulkExportGenerateResourceFilesStep() {
+		return myStepBuilderFactory.get("patientBulkExportGenerateResourceFilesStep")
+			.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
+ .reader(patientBulkItemReader()) + .processor(myPidToIBaseResourceProcessor) + .writer(resourceToFileWriter()) + .listener(bulkExportGenerateResourceFilesStepListener()) + .build(); + } + + @Bean + @StepScope + public PatientBulkItemReader patientBulkItemReader() { + return new PatientBulkItemReader(); + } + @Bean @StepScope public BulkItemReader bulkItemReader(){ diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java index 14be984a072..a1145c97609 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java @@ -49,6 +49,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateUtils; import org.hl7.fhir.instance.model.api.IBaseBinary; import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r4.model.Group; import org.hl7.fhir.r4.model.InstantType; import org.quartz.JobExecutionContext; import org.slf4j.Logger; @@ -229,6 +230,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { if (isGroupBulkJob(theBulkExportJobEntity)) { enhanceBulkParametersWithGroupParameters(theBulkExportJobEntity, parameters); myJobSubmitter.runJob(myGroupBulkExportJob, parameters.toJobParameters()); + } else if (isPatientBulkJob(theBulkExportJobEntity)) { + myJobSubmitter.runJob(myPatientBulkExportJob, parameters.toJobParameters()); } else { myJobSubmitter.runJob(myBulkExportJob, parameters.toJobParameters()); } @@ -237,6 +240,10 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { } } + private boolean isPatientBulkJob(BulkExportJobEntity theBulkExportJobEntity) { + //TODO GGG + } + private void enhanceBulkParametersWithGroupParameters(BulkExportJobEntity theBulkExportJobEntity, JobParametersBuilder theParameters) { String theGroupId = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_GROUP_ID); String expandMdm = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_MDM); @@ -282,7 +289,14 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { // TODO GGG KS can we encode BulkDataExportOptions as a JSON string as opposed to this request string. Feels like it would be a more extensible encoding... 
//Probably yes, but this will all need to be rebuilt when we remove this bridge entity StringBuilder requestBuilder = new StringBuilder(); - requestBuilder.append("/").append(JpaConstants.OPERATION_EXPORT); + requestBuilder.append("/"); + + //Prefix the export url with Group/[id]/ + if (theBulkDataExportOptions instanceof GroupBulkDataExportOptions) { + requestBuilder.append(((GroupBulkDataExportOptions)theBulkDataExportOptions).getGroupId()).append("/"); + } + + requestBuilder.append(JpaConstants.OPERATION_EXPORT); requestBuilder.append("?").append(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT).append("=").append(escapeUrlParam(outputFormat)); Set resourceTypes = theBulkDataExportOptions.getResourceTypes(); if (resourceTypes != null) { @@ -300,6 +314,9 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_GROUP_ID).append("=").append(groupOptions.getGroupId().getValue()); requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_MDM).append("=").append(groupOptions.isMdm()); } + + requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_SYSTEM_LEVEL).append("=").append(theBulkDataExportOptions.isSystemLevel()); + String request = requestBuilder.toString(); Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 94ba91f5fd6..986cba83cfd 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -97,6 +97,10 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Qualifier(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME) private Job myGroupBulkJob; + @Autowired + @Qualifier(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME) + private Job myPatientBulkJob; + private IIdType myPatientGroupId; @Test @@ -562,6 +566,58 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(nextContents, is(containsString("IMM8"))); } + @Test + public void testPatientLevelExportWorks() throws JobParametersInvalidException { + createResources(); + + // Create a bulk job + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Immunization", "Observation"), null, null, false)); + + GroupBulkExportJobParametersBuilder paramBuilder = new GroupBulkExportJobParametersBuilder(); + paramBuilder.setGroupId(myPatientGroupId.getIdPart()); + paramBuilder.setJobUUID(jobDetails.getJobId()); + paramBuilder.setReadChunkSize(10L); + + JobExecution jobExecution = myBatchJobSubmitter.runJob(myPatientBulkJob, paramBuilder.toJobParameters()); + + awaitJobCompletion(jobExecution); + IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); + + assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getFiles().size(), equalTo(1)); + assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); + + // Iterate over the files + Binary nextBinary = myBinaryDao.read(jobInfo.getFiles().get(0).getResourceId()); + assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType()); + String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8); + ourLog.info("Next 
contents for type {}:\n{}", nextBinary.getResourceType(), nextContents); + + assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); + assertThat(nextContents, is(containsString("IMM0"))); + assertThat(nextContents, is(containsString("IMM1"))); + assertThat(nextContents, is(containsString("IMM2"))); + assertThat(nextContents, is(containsString("IMM3"))); + assertThat(nextContents, is(containsString("IMM4"))); + assertThat(nextContents, is(containsString("IMM5"))); + assertThat(nextContents, is(containsString("IMM6"))); + assertThat(nextContents, is(containsString("IMM7"))); + assertThat(nextContents, is(containsString("IMM8"))); + assertThat(nextContents, is(containsString("IMM9"))); + assertThat(nextContents, is(containsString("IMM999"))); + + assertThat(nextContents, is(not(containsString("IMM2000")))); + assertThat(nextContents, is(not(containsString("IMM2001")))); + assertThat(nextContents, is(not(containsString("IMM2002")))); + assertThat(nextContents, is(not(containsString("IMM2003")))); + assertThat(nextContents, is(not(containsString("IMM2004")))); + assertThat(nextContents, is(not(containsString("IMM2005")))); + + } + + + } + // CareTeam has two patient references: participant and patient. This test checks if we find the patient if participant is null but patient is not null @Test public void testGroupBatchJobCareTeam() throws Exception { @@ -789,15 +845,24 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { Long goldenPid2 = myIdHelperService.getPidOrNull(g2Outcome.getResource()); //Create some nongroup patients MDM linked to a different golden resource. They shouldnt be included in the query. - for (int i = 1000; i < 1005; i++) { - DaoMethodOutcome patientOutcome = createPatientWithIndex(i); + for (int i = 0; i < 5; i++) { + int index = 1000 + i; + DaoMethodOutcome patientOutcome = createPatientWithIndex(index); IIdType patId = patientOutcome.getId().toUnqualifiedVersionless(); Long sourcePid = myIdHelperService.getPidOrNull(patientOutcome.getResource()); linkToGoldenResource(goldenPid2, sourcePid); - createObservationWithIndex(i, patId); - createImmunizationWithIndex(i, patId); - createCareTeamWithIndex(i, patId); + createObservationWithIndex(index, patId); + createImmunizationWithIndex(index, patId); + createCareTeamWithIndex(index, patId); } + + //Create some Observations and immunizations which have _no subjects!_ These will be exlucded from the Patient level export. 
+ for (int i = 0; i < 10; i++) { + int index = 2000 + i; + createObservationWithIndex(index, null); + createImmunizationWithIndex(index, null); + } + } private DaoMethodOutcome createPatientWithIndex(int i) { @@ -820,7 +885,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { private void createImmunizationWithIndex(int i, IIdType patId) { Immunization immunization = new Immunization(); immunization.setId("IMM" + i); - immunization.setPatient(new Reference(patId)); + if (patId != null ) { + immunization.setPatient(new Reference(patId)); + } if (i % 2 == 0) { CodeableConcept cc = new CodeableConcept(); cc.addCoding().setSystem("vaccines").setCode("Flu"); @@ -838,7 +905,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { obs.setId("OBS" + i); obs.addIdentifier().setSystem("SYS").setValue("VAL" + i); obs.setStatus(Observation.ObservationStatus.FINAL); - obs.getSubject().setReference(patId.getValue()); + if (patId != null) { + obs.getSubject().setReference(patId.getValue()); + } myObservationDao.update(obs); } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java index 3cf09bb4efc..4b5d9ebb57f 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java @@ -194,6 +194,11 @@ public class JpaConstants { */ public static final String PARAM_EXPORT_MDM = "_mdm"; + /** + * Whether the Export is for System level or Patient Level. + */ + public static final String PARAM_EXPORT_SYSTEM_LEVEL = "_system"; + /** * Parameter for delete to indicate the deleted resources should also be expunged */ diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java index f73e9ed2480..1716e099cad 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java @@ -67,8 +67,7 @@ public class TestJpaR4Config extends BaseJavaConfigR4 { BasicDataSource retVal = new BasicDataSource(); retVal.setDriver(new org.h2.Driver()); -// retVal.setUrl("jdbc:h2:mem:testdb_r4"); - retVal.setUrl("jdbc:h2:file:./testdb_r4;create=true"); + retVal.setUrl("jdbc:h2:mem:testdb_r4"); retVal.setMaxWaitMillis(10000); retVal.setUsername(""); retVal.setPassword(""); From 2f1518344b28b5b5e50172067db3edf27c442e98 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Thu, 4 Mar 2021 16:53:05 -0500 Subject: [PATCH 02/22] Add changelogs, add patientbulkredaer implementation --- .../ca/uhn/fhir/util/SearchParameterUtil.java | 1 + .../5_4_0/2433-group-bulk-export-support.yaml | 5 ++ .../2445-support-patient-level-export.yaml | 4 + .../fhir/jpa/bulk/job/BaseBulkItemReader.java | 46 ++++++++++ .../jpa/bulk/job/GroupBulkItemReader.java | 49 +---------- .../jpa/bulk/job/PatientBulkItemReader.java | 88 +++++++++++++++++++ 6 files changed, 146 insertions(+), 47 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2433-group-bulk-export-support.yaml create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml create mode 100644 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SearchParameterUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SearchParameterUtil.java index f066d30fac6..75d1c7c4b2a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SearchParameterUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SearchParameterUtil.java @@ -50,6 +50,7 @@ public class SearchParameterUtil { return retVal; } + @Nullable public static String getCode(FhirContext theContext, IBaseResource theResource) { return getStringChild(theContext, theResource, "code"); diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2433-group-bulk-export-support.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2433-group-bulk-export-support.yaml new file mode 100644 index 00000000000..4e040e2b258 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2433-group-bulk-export-support.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 2433 +title: "Support has been added for MDM expansion during Group bulk export. Calling a group export via `/Group/123/$export?_mdm=true` + will cause Bulk Export to not only match group members, but also any MDM-matched patients, and their related golden record patients" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml new file mode 100644 index 00000000000..ff509c01f0d --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml @@ -0,0 +1,4 @@ +--- +type: add +issue: 2445 +title: "Support has been added for patient type level Bulk export. This can be done via the `/Patient/$export` endpoint." diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java index 29e6fcf5cbe..49b8f8495db 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.bulk.job; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.batch.log.Logs; @@ -53,6 +54,7 @@ public abstract class BaseBulkItemReader implements ItemReader myPidIterator; + private RuntimeSearchParam myPatientSearchParam; /** * Get and cache an ISearchBuilder for the given resource type this partition is responsible for. 
@@ -137,4 +139,48 @@ public abstract class BaseBulkItemReader implements ItemReader1 result, throw an error + * 3.2 If that returns 1 result, return it + */ + protected RuntimeSearchParam getPatientSearchParamForCurrentResourceType() { + if (myPatientSearchParam == null) { + RuntimeResourceDefinition runtimeResourceDefinition = myContext.getResourceDefinition(myResourceType); + myPatientSearchParam = runtimeResourceDefinition.getSearchParam("patient"); + if (myPatientSearchParam == null) { + myPatientSearchParam = runtimeResourceDefinition.getSearchParam("subject"); + if (myPatientSearchParam == null) { + myPatientSearchParam = getRuntimeSearchParamByCompartment(runtimeResourceDefinition); + if (myPatientSearchParam == null) { + String errorMessage = String.format("[%s] has no search parameters that are for patients, so it is invalid for Group Bulk Export!", myResourceType); + throw new IllegalArgumentException(errorMessage); + } + } + } + } + return myPatientSearchParam; + } + + /** + * Search the resource definition for a compartment named 'patient' and return its related Search Parameter. + */ + protected RuntimeSearchParam getRuntimeSearchParamByCompartment(RuntimeResourceDefinition runtimeResourceDefinition) { + RuntimeSearchParam patientSearchParam; + List searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient"); + if (searchParams == null || searchParams.size() == 0) { + String errorMessage = String.format("Resource type [%s] is not eligible for Group Bulk export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", myResourceType); + throw new IllegalArgumentException(errorMessage); + } else if (searchParams.size() == 1) { + patientSearchParam = searchParams.get(0); + } else { + String errorMessage = String.format("Resource type [%s] is not eligible for Group Bulk export, as we are unable to disambiguate which patient search parameter we should be searching by.", myResourceType); + throw new IllegalArgumentException(errorMessage); + } + return patientSearchParam; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java index f57729cdff7..482d3f773ee 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java @@ -173,7 +173,6 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade return expandedIds; } - private void queryResourceTypeWithReferencesToPatients(List myReadPids, List idChunk) { //Build SP map //First, inject the _typeFilters and _since from the export job @@ -198,58 +197,14 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade private void filterSearchByResourceIds(List idChunk, SearchParameterMap expandedSpMap) { ReferenceOrListParam orList = new ReferenceOrListParam(); idChunk.forEach(id -> orList.add(new ReferenceParam(id))); - expandedSpMap.add(getPatientSearchParam().getName(), orList); + expandedSpMap.add(getPatientSearchParamForCurrentResourceType().getName(), orList); } private RuntimeSearchParam validateSearchParameters(SearchParameterMap expandedSpMap) { - RuntimeSearchParam runtimeSearchParam = getPatientSearchParam(); + RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType(); if (expandedSpMap.get(runtimeSearchParam.getName()) != null) 
{ throw new IllegalArgumentException(String.format("Group Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName())); } return runtimeSearchParam; } - - /** - * Given the resource type, fetch its patient-based search parameter name - * 1. Attempt to find one called 'patient' - * 2. If that fails, find one called 'subject' - * 3. If that fails, find find by Patient Compartment. - * 3.1 If that returns >1 result, throw an error - * 3.2 If that returns 1 result, return it - */ - private RuntimeSearchParam getPatientSearchParam() { - if (myPatientSearchParam == null) { - RuntimeResourceDefinition runtimeResourceDefinition = myContext.getResourceDefinition(myResourceType); - myPatientSearchParam = runtimeResourceDefinition.getSearchParam("patient"); - if (myPatientSearchParam == null) { - myPatientSearchParam = runtimeResourceDefinition.getSearchParam("subject"); - if (myPatientSearchParam == null) { - myPatientSearchParam = getRuntimeSearchParamByCompartment(runtimeResourceDefinition); - if (myPatientSearchParam == null) { - String errorMessage = String.format("[%s] has no search parameters that are for patients, so it is invalid for Group Bulk Export!", myResourceType); - throw new IllegalArgumentException(errorMessage); - } - } - } - } - return myPatientSearchParam; - } - - /** - * Search the resource definition for a compartment named 'patient' and return its related Search Parameter. - */ - private RuntimeSearchParam getRuntimeSearchParamByCompartment(RuntimeResourceDefinition runtimeResourceDefinition) { - RuntimeSearchParam patientSearchParam; - List searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient"); - if (searchParams == null || searchParams.size() == 0) { - String errorMessage = String.format("Resource type [%s] is not eligible for Group Bulk export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", myResourceType); - throw new IllegalArgumentException(errorMessage); - } else if (searchParams.size() == 1) { - patientSearchParam = searchParams.get(0); - } else { - String errorMessage = String.format("Resource type [%s] is not eligible for Group Bulk export, as we are unable to disambiguate which patient search parameter we should be searching by.", myResourceType); - throw new IllegalArgumentException(errorMessage); - } - return patientSearchParam; - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java new file mode 100644 index 00000000000..46c72ac6200 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java @@ -0,0 +1,88 @@ +package ca.uhn.fhir.jpa.bulk.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+
+import ca.uhn.fhir.context.RuntimeResourceDefinition;
+import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.batch.log.Logs;
+import ca.uhn.fhir.jpa.dao.IResultIterator;
+import ca.uhn.fhir.jpa.dao.ISearchBuilder;
+import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
+import ca.uhn.fhir.rest.param.StringParam;
+import org.slf4j.Logger;
+import org.springframework.batch.item.ItemReader;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Bulk Item reader for the Patient Bulk Export job.
+ * Instead of performing a normal query on the resource type using type filters, we instead
+ *
+ * 1. Determine the resourcetype
+ * 2. Search for anything that has `patient-compartment-search-param:missing=false`
+ */
+public class PatientBulkItemReader extends BaseBulkItemReader implements ItemReader<List<ResourcePersistentId>> {
+	private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
+
+
+	private RuntimeSearchParam validateSearchParameters(SearchParameterMap expandedSpMap) {
+		RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType();
+		if (expandedSpMap.get(runtimeSearchParam.getName()) != null) {
+			throw new IllegalArgumentException(String.format("Group Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName()));
+		}
+		return runtimeSearchParam;
+	}
+	@Override
+	Iterator<ResourcePersistentId> getResourcePidIterator() {
+		List<ResourcePersistentId> myReadPids = new ArrayList<>();
+
+		//use _typeFilter and _since and all those fancy bits and bobs to generate our basic SP map.
+		SearchParameterMap map = createSearchParameterMapForJob();
+
+		String patientSearchParam = getPatientSearchParamForCurrentResourceType().getName();
+
+		//Ensure users did not monkey with the patient compartment search parameter.
+		validateSearchParameters(map);
+
+		//Skip adding the parameter querying for patient= if we are in fact querying the patient resource type.
+		if (!myResourceType.equalsIgnoreCase("Patient")) {
+			//Now that we have our basic built Bulk Export SP map, we inject the condition that the resources returned
+			//must have a patient= or subject= reference set.
+ map.add(patientSearchParam, new StringParam().setMissing(false)); + } + + ISearchBuilder sb = getSearchBuilderForLocalResourceType(); + IResultIterator myResultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions()); + + while (myResultIterator.hasNext()) { + myReadPids.add(myResultIterator.next()); + } + + return myReadPids.iterator(); + } + + +} From f5372c9f073de5e28986bbe8c656c3dff254be92 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Thu, 4 Mar 2021 16:53:52 -0500 Subject: [PATCH 03/22] Fix typo --- .../fhir/changelog/5_4_0/2445-support-patient-level-export.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml index ff509c01f0d..2e07e60402a 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml @@ -1,4 +1,4 @@ --- type: add issue: 2445 -title: "Support has been added for patient type level Bulk export. This can be done via the `/Patient/$export` endpoint." +title: "Support has been added for patient level Bulk export. This can be done via the `/Patient/$export` endpoint." From 954704884886e1153ff69f0b2e61110c50c7cd4b Mon Sep 17 00:00:00 2001 From: Tadgh Date: Thu, 4 Mar 2021 20:16:50 -0500 Subject: [PATCH 04/22] Add support for Patient export --- .../uhn/fhir/jpa/batch/BatchJobsConfig.java | 2 +- .../bulk/api/GroupBulkDataExportOptions.java | 2 +- .../jpa/bulk/job/BulkExportJobConfig.java | 34 +++++++++---------- .../job/CreateBulkExportEntityTasklet.java | 2 +- .../jpa/bulk/job/PatientBulkItemReader.java | 3 +- .../bulk/provider/BulkDataExportProvider.java | 4 +-- .../jpa/bulk/svc/BulkDataExportSvcImpl.java | 5 +++ .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 33 ++++++++---------- 8 files changed, 44 insertions(+), 41 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java index 5cb70419561..178ee7358d5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java @@ -33,5 +33,5 @@ import org.springframework.context.annotation.Import; public class BatchJobsConfig { public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob"; public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob"; - public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExport"; + public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob"; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/GroupBulkDataExportOptions.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/GroupBulkDataExportOptions.java index fb80bf2843f..2718f4e461c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/GroupBulkDataExportOptions.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/GroupBulkDataExportOptions.java @@ -10,7 +10,7 @@ public class GroupBulkDataExportOptions extends BulkDataExportOptions { private final boolean myMdm; public GroupBulkDataExportOptions(String 
theOutputFormat, Set<String> theResourceTypes, Date theSince, Set<String> theFilters, IIdType theGroupId, boolean theMdm) {
-		super(theOutputFormat, theResourceTypes, theSince, theFilters);
+		super(theOutputFormat, theResourceTypes, theSince, theFilters, false);
 		myGroupId = theGroupId;
 		myMdm = theMdm;
 	}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java
index 31cf3b93e7c..a2b8d804e3b 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobConfig.java
@@ -95,7 +95,7 @@
 	@Bean
 	@Lazy
 	public Job patientBulkExportJob() {
-		return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
+		return myJobBuilderFactory.get(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
 			.validator(bulkJobParameterValidator())
 			.start(createBulkExportEntityStep())
 			.next(patientPartitionStep())
@@ -126,6 +126,7 @@
 		return new BulkExportJobParameterValidator();
 	}
 
+	//Writers
 	@Bean
 	public Step groupBulkExportGenerateResourceFilesStep() {
 		return myStepBuilderFactory.get("groupBulkExportGenerateResourceFilesStep")
@@ -137,13 +138,6 @@ public class BulkExportJobConfig {
 			.build();
 	}
 
-	@Bean
-	@StepScope
-	public GroupBulkItemReader groupBulkItemReader(){
-		return new GroupBulkItemReader();
-	}
-
-
 	@Bean
 	public Step bulkExportGenerateResourceFilesStep() {
 		return myStepBuilderFactory.get("bulkExportGenerateResourceFilesStep")
@@ -154,6 +148,16 @@ public class BulkExportJobConfig {
 			.listener(bulkExportGenerateResourceFilesStepListener())
 			.build();
 	}
+	@Bean
+	public Step patientBulkExportGenerateResourceFilesStep() {
+		return myStepBuilderFactory.get("patientBulkExportGenerateResourceFilesStep")
+			.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
- .reader(patientBulkItemReader()) - .processor(myPidToIBaseResourceProcessor) - .writer(resourceToFileWriter()) - .listener(bulkExportGenerateResourceFilesStepListener()) - .build(); + @StepScope + public GroupBulkItemReader groupBulkItemReader(){ + return new GroupBulkItemReader(); } @Bean @@ -235,7 +235,7 @@ public class BulkExportJobConfig { @Bean @StepScope - public ItemWriter> resourceToFileWriter() { + public ResourceToFileWriter resourceToFileWriter() { return new ResourceToFileWriter(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java index 91df88ef7cf..b6466e7532c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java @@ -66,7 +66,7 @@ public class CreateBulkExportEntityTasklet implements Tasklet { outputFormat = Constants.CT_FHIR_NDJSON; } - IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypeSet, since, filterSet)); + IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypeSet, since, filterSet, true)); addUUIDToJobContext(theChunkContext, jobInfo.getJobId()); return RepeatStatus.FINISHED; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java index 46c72ac6200..c470b7f77c4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java @@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; +import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.StringParam; import org.slf4j.Logger; import org.springframework.batch.item.ItemReader; @@ -71,7 +72,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea if (!myResourceType.equalsIgnoreCase("Patient")) { //Now that we have our basic built Bulk Export SP map, we inject the condition that the resources returned //must have a patient= or subject= reference set. 
- map.add(patientSearchParam, new StringParam().setMissing(false)); + map.add(patientSearchParam, new ReferenceParam().setMissing(true)); } ISearchBuilder sb = getSearchBuilderForLocalResourceType(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index e251bcd31d1..df3fcf2aeb9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -75,7 +75,7 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") IPrimitiveType theTypeFilter, ServletRequestDetails theRequestDetails ) { @@ -102,7 +102,7 @@ public class BulkDataExportProvider { filters = ArrayUtil.commaSeparatedListToCleanSet(theTypeFilter.getValueAsString()); } - IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypes, since, filters)); + IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypes, since, filters, true)); String serverBase = getServerBase(theRequestDetails); String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" 
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + outcome.getJobId(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java index a1145c97609..d2f989814bb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java @@ -114,6 +114,10 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { @Qualifier(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME) private org.springframework.batch.core.Job myGroupBulkExportJob; + @Autowired + @Qualifier(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME) + private org.springframework.batch.core.Job myPatientBulkExportJob; + private final int myRetentionPeriod = (int) (2 * DateUtils.MILLIS_PER_HOUR); /** @@ -242,6 +246,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { private boolean isPatientBulkJob(BulkExportJobEntity theBulkExportJobEntity) { //TODO GGG + return true; } private void enhanceBulkParametersWithGroupParameters(BulkExportJobEntity theBulkExportJobEntity, JobParametersBuilder theParameters) { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 986cba83cfd..9dbc82b709c 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -159,7 +159,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_InvalidOutputFormat() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Patient", "Observation"), null, null)); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Patient", "Observation"), null, null, true)); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid output format: application/fhir+json", e.getMessage()); @@ -169,7 +169,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_OnlyBinarySelected() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Binary"), null, null)); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Binary"), null, null, true)); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid output format: application/fhir+json", e.getMessage()); @@ -179,7 +179,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_InvalidResourceTypes() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient", "FOO"), null, null)); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient", "FOO"), null, null, true)); fail(); } catch (InvalidRequestException e) { assertEquals("Unknown or unsupported resource type: FOO", e.getMessage()); @@ -189,7 +189,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_MultipleTypeFiltersForSameType() { try { - myBulkDataExportSvc.submitJob(new 
BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Patient?name=a", "Patient?active=true"))); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Patient?name=a", "Patient?active=true"), true)); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid _typeFilter value \"Patient?name=a\". Multiple filters found for type Patient", e.getMessage()); @@ -199,7 +199,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_TypeFilterForNonSelectedType() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Observation?code=123"))); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Observation?code=123"), true)); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid _typeFilter value \"Observation?code=123\". Resource type does not appear in _type list", e.getMessage()); @@ -209,7 +209,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_TypeFilterInvalid() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Hello"))); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Hello"), true)); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid _typeFilter value \"Hello\". Must be in the form [ResourceType]?[params]", e.getMessage()); @@ -220,11 +220,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { public void testSubmit_ReusesExisting() { // Submit - IBulkDataExportSvc.JobInfo jobDetails1 = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null)); + IBulkDataExportSvc.JobInfo jobDetails1 = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null, true)); assertNotNull(jobDetails1.getJobId()); // Submit again - IBulkDataExportSvc.JobInfo jobDetails2 = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null)); + IBulkDataExportSvc.JobInfo jobDetails2 = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null, true)); assertNotNull(jobDetails2.getJobId()); assertEquals(jobDetails1.getJobId(), jobDetails2.getJobId()); @@ -245,7 +245,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient"), null, null)); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient"), null, null, true)); assertNotNull(jobDetails.getJobId()); // Check the status @@ -275,7 +275,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, Sets.newHashSet(TEST_FILTER))); + IBulkDataExportSvc.JobInfo jobDetails = 
myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, Sets.newHashSet(TEST_FILTER), true)); assertNotNull(jobDetails.getJobId()); // Check the status @@ -328,7 +328,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { myBinaryDao.create(b); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, null, null, null)); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, null, null, null, true)); assertNotNull(jobDetails.getJobId()); // Check the status @@ -382,7 +382,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Create a bulk job HashSet types = Sets.newHashSet("Patient"); Set typeFilters = Sets.newHashSet("Patient?_has:Observation:patient:identifier=SYS|VAL3"); - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, types, null, typeFilters)); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, types, null, typeFilters, true)); assertNotNull(jobDetails.getJobId()); // Check the status @@ -434,7 +434,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), cutoff.getValue(), null)); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), cutoff.getValue(), null, true)); assertNotNull(jobDetails.getJobId()); // Check the status @@ -513,7 +513,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null)); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null, true)); //Add the UUID to the job BulkExportJobParametersBuilder paramBuilder = new BulkExportJobParametersBuilder() @@ -584,7 +584,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); - assertThat(jobInfo.getFiles().size(), equalTo(1)); + assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); // Iterate over the files @@ -615,9 +615,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } - - } - // CareTeam has two patient references: participant and patient. 
This test checks if we find the patient if participant is null but patient is not null @Test public void testGroupBatchJobCareTeam() throws Exception { From 5b75ff2425b745191e45ae1711bb6a2d9ed853d7 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Fri, 5 Mar 2021 10:35:52 -0500 Subject: [PATCH 05/22] WIP working out bug in typefilters --- .../fhir/jpa/bulk/job/BaseBulkItemReader.java | 1 + .../bulk/provider/BulkDataExportProvider.java | 8 +- .../jpa/bulk/svc/BulkDataExportSvcImpl.java | 3 +- .../jpa/bulk/BulkDataExportProviderTest.java | 77 ++++++++++++++++++- 4 files changed, 85 insertions(+), 4 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java index 49b8f8495db..b759571561f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java @@ -87,6 +87,7 @@ public abstract class BaseBulkItemReader implements ItemReader requestUrl = UrlUtil.parseQueryStrings(jobEntity.getRequest()); String[] typeFilters = requestUrl.get(JpaConstants.PARAM_EXPORT_TYPE_FILTER); if (typeFilters != null) { + //TODO GGG START HERE Optional filter = Arrays.stream(typeFilters).filter(t -> t.startsWith(myResourceType + "?")).findFirst(); if (filter.isPresent()) { String matchUrl = filter.get(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index df3fcf2aeb9..39a624c4ba1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -48,6 +48,8 @@ import org.springframework.beans.factory.annotation.Autowired; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.Date; +import java.util.HashSet; +import java.util.List; import java.util.Set; public class BulkDataExportProvider { @@ -75,7 +77,7 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") IPrimitiveType theTypeFilter, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, ServletRequestDetails theRequestDetails ) { @@ -97,9 +99,11 @@ public class BulkDataExportProvider { since = theSince.getValue(); } - Set filters = null; + Set filters = new HashSet<>(); if (theTypeFilter != null) { +// theTypeFilter.stream().forEach(filter -> filters.addAll(ArrayUtil.commaSeparatedListToCleanSet(filter.getValueAsString()))); filters = ArrayUtil.commaSeparatedListToCleanSet(theTypeFilter.getValueAsString()); +// theTypeFilter.stream().forEach(filter -> filters.addAll(ArrayUtil.commaSeparatedListToCleanSet(filter.getValueAsString()))); } IBulkDataExportSvc.JobInfo outcome = 
myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypes, since, filters, true)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java index d2f989814bb..55d135412cf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java @@ -312,7 +312,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_SINCE).append("=").append(new InstantType(since).setTimeZoneZulu(true).getValueAsString()); } if (theBulkDataExportOptions.getFilters() != null && theBulkDataExportOptions.getFilters().size() > 0) { - requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(String.join(",", escapeUrlParams(theBulkDataExportOptions.getFilters()))); + theBulkDataExportOptions.getFilters().stream() + .forEach(filter -> requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(escapeUrlParam(filter))); } if (theBulkDataExportOptions instanceof GroupBulkDataExportOptions) { GroupBulkDataExportOptions groupOptions = (GroupBulkDataExportOptions) theBulkDataExportOptions; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 45ea95c7afb..4fc4bc2091c 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -267,7 +267,6 @@ public class BulkDataExportProviderTest { assertEquals("Patient", responseJson.getOutput().get(2).getType()); assertEquals("http://localhost:" + myPort + "/Binary/333", responseJson.getOutput().get(2).getUrl()); } - } @Test @@ -339,4 +338,80 @@ public class BulkDataExportProviderTest { assertEquals(GROUP_ID, options.getGroupId().getValue()); assertThat(options.isMdm(), is(equalTo(true))); } + + @Test + public void testInitiateWithGetAndMultipleTypeFilters() throws IOException { + //TODO GGG FIX ME + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() + .setJobId(A_JOB_ID); + when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + + InstantType now = InstantType.now(); + + String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + + "?" 
+ JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner") + + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()) +// + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?identifier=foo") +// + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Practitioner?identifier=bar") +// + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?name=zoop"); + + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + // "MedicationRequest%3Fstatus%3Dactive,MedicationRequest%3Fstatus%3Dcompleted%26date%3Dgt2018-07-01T00%3A00%3A00Z"; + "Immunization?patient.identifier%3DSC378274-MRN%7C009999997%2CSC378274-MRN%7C009999998%2CSC378274-MRN%7C009999999%26date%3D2020-01-02"; + + HttpGet get = new HttpGet(url); + get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + ourLog.info("Request: {}", url); + try (CloseableHttpResponse response = myClient.execute(get)) { + ourLog.info("Response: {}", response.toString()); + + assertEquals(202, response.getStatusLine().getStatusCode()); + assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); + assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + } + + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); + assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); + assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); + assertThat(options.getSince(), notNullValue()); + assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo", "Practitioner?identifier=bar", "Patient?name=zoop")); + } + + @Test + public void testInitiateWithPostAndMultipleTypeFilters() throws IOException { + + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() + .setJobId(A_JOB_ID); + when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + + InstantType now = InstantType.now(); + + Parameters input = new Parameters(); + input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner")); + input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Patient?identifier=foo")); + + ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); + + HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); + post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + post.setEntity(new ResourceEntity(myCtx, input)); + ourLog.info("Request: {}", post); + try (CloseableHttpResponse response = myClient.execute(post)) { + ourLog.info("Response: {}", response.toString()); + + assertEquals(202, response.getStatusLine().getStatusCode()); + assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); + assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + } + + verify(myBulkDataExportSvc, 
times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); + assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); + assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); + assertThat(options.getSince(), notNullValue()); + assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo")); + } + } From ccf57986345d307f97b22b7bc96909f54a5c1124 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Fri, 5 Mar 2021 14:20:15 -0500 Subject: [PATCH 06/22] Fix bug with ignored typeFilters --- .../bulk/provider/BulkDataExportProvider.java | 38 ++++++++++-------- .../jpa/bulk/BulkDataExportProviderTest.java | 39 ++++++++++--------- 2 files changed, 41 insertions(+), 36 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index 39a624c4ba1..c2f87b60775 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -47,12 +47,14 @@ import org.springframework.beans.factory.annotation.Autowired; import javax.servlet.http.HttpServletResponse; import java.io.IOException; +import java.util.Arrays; import java.util.Date; import java.util.HashSet; -import java.util.List; import java.util.Set; + public class BulkDataExportProvider { + public static final String FARM_TO_TABLE_TYPE_FILTER_REGEX = "(?:,)(?=[A-Z][a-z]+\\?)"; @Autowired private IBulkDataExportSvc myBulkDataExportSvc; @@ -80,7 +82,6 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, ServletRequestDetails theRequestDetails ) { - String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER); PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader); if (prefer.getRespondAsync() == false) { @@ -98,15 +99,9 @@ public class BulkDataExportProvider { if (theSince != null) { since = theSince.getValue(); } + Set typeFilters = splitTypeFilters(theTypeFilter); - Set filters = new HashSet<>(); - if (theTypeFilter != null) { -// theTypeFilter.stream().forEach(filter -> filters.addAll(ArrayUtil.commaSeparatedListToCleanSet(filter.getValueAsString()))); - filters = ArrayUtil.commaSeparatedListToCleanSet(theTypeFilter.getValueAsString()); -// theTypeFilter.stream().forEach(filter -> filters.addAll(ArrayUtil.commaSeparatedListToCleanSet(filter.getValueAsString()))); - } - - IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypes, since, filters, true)); + IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypes, since, typeFilters, true)); String serverBase = getServerBase(theRequestDetails); String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" 
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + outcome.getJobId(); @@ -121,6 +116,19 @@ public class BulkDataExportProvider { response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED); } + private Set splitTypeFilters(IPrimitiveType theTypeFilter) { + if (theTypeFilter== null) { + return null; + } + String typeFilterSring = theTypeFilter.getValueAsString(); + String[] typeFilters = typeFilterSring.split(FARM_TO_TABLE_TYPE_FILTER_REGEX); + if (typeFilters == null || typeFilters.length == 0) { + return null; + } + + return new HashSet<>(Arrays.asList(typeFilters)); + } + /** * $export-poll-status */ @@ -213,9 +221,6 @@ public class BulkDataExportProvider { resourceTypes = ArrayUtil.commaSeparatedListToCleanSet(theType.getValueAsString()); } - //TODO GGG eventually, we will support these things. - Set filters = null; - Date since = null; if (theSince != null) { since = theSince.getValue(); @@ -225,10 +230,9 @@ public class BulkDataExportProvider { if (theMdm != null) { mdm = theMdm.getValue(); } - if (theTypeFilter != null) { - filters = ArrayUtil.commaSeparatedListToCleanSet(theTypeFilter.getValueAsString()); - } - IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new GroupBulkDataExportOptions(outputFormat, resourceTypes, since, filters, theIdParam, mdm)); + Set typeFilters = splitTypeFilters(theTypeFilter); + + IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new GroupBulkDataExportOptions(outputFormat, resourceTypes, since, typeFilters, theIdParam, mdm)); String serverBase = getServerBase(theRequestDetails); String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + outcome.getJobId(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 4fc4bc2091c..484c2da955f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -350,31 +350,32 @@ public class BulkDataExportProviderTest { String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + "?" 
+ JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) - + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner") - + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()) -// + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?identifier=foo") -// + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Practitioner?identifier=bar") -// + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?name=zoop"); - + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + // "MedicationRequest%3Fstatus%3Dactive,MedicationRequest%3Fstatus%3Dcompleted%26date%3Dgt2018-07-01T00%3A00%3A00Z"; - "Immunization?patient.identifier%3DSC378274-MRN%7C009999997%2CSC378274-MRN%7C009999998%2CSC378274-MRN%7C009999999%26date%3D2020-01-02"; + + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Immunization, Observation") + + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()); + + String immunizationTypeFilter1 = "Immunization?patient.identifier=SC378274-MRN|009999997,SC378274-MRN|009999998,SC378274-MRN|009999999&date=2020-01-02"; + String immunizationTypeFilter2 = "Immunization?patient=Patient/123"; + String observationFilter1 = "Observation?subject=Patient/123&created=ge2020-01-01"; + StringBuilder multiValuedTypeFilterBuilder = new StringBuilder() + .append("&") + .append(JpaConstants.PARAM_EXPORT_TYPE_FILTER) + .append("=") + .append(UrlUtil.escapeUrlParam(immunizationTypeFilter1)) + .append(",") + .append(UrlUtil.escapeUrlParam(immunizationTypeFilter2)) + .append(",") + .append(UrlUtil.escapeUrlParam(observationFilter1)); + + url += multiValuedTypeFilterBuilder.toString(); HttpGet get = new HttpGet(url); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); - ourLog.info("Request: {}", url); - try (CloseableHttpResponse response = myClient.execute(get)) { - ourLog.info("Response: {}", response.toString()); - - assertEquals(202, response.getStatusLine().getStatusCode()); - assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); - } + myClient.execute(get); verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); - assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); - assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); - assertThat(options.getSince(), notNullValue()); - assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo", "Practitioner?identifier=bar", "Patient?name=zoop")); + + assertThat(options.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1)); } @Test From 27952ab5e428d1f674530ddce090c900fb503910 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Tue, 9 Mar 2021 00:56:49 -0500 Subject: [PATCH 07/22] add patient export, partial rework for multiple _typeFilters --- .../jpa/bulk/api/BulkDataExportOptions.java | 61 +++++- .../bulk/api/GroupBulkDataExportOptions.java | 25 --- .../fhir/jpa/bulk/job/BaseBulkItemReader.java | 26 +-- .../job/BulkExportJobParametersBuilder.java | 5 + .../uhn/fhir/jpa/bulk/job/BulkItemReader.java | 14 +- 
.../job/CreateBulkExportEntityTasklet.java | 16 +- .../jpa/bulk/job/GroupBulkItemReader.java | 4 +- .../jpa/bulk/job/PatientBulkItemReader.java | 18 +- .../bulk/provider/BulkDataExportProvider.java | 169 +++++++++------- .../jpa/bulk/svc/BulkDataExportSvcImpl.java | 33 ++- .../jpa/bulk/BulkDataExportProviderTest.java | 57 ++++-- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 191 +++++++++++++----- .../ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java | 4 +- .../uhn/fhir/jpa/model/util/JpaConstants.java | 4 +- 14 files changed, 403 insertions(+), 224 deletions(-) delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/GroupBulkDataExportOptions.java diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java index 93e8aa75ce6..fc52961b18b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java @@ -1,21 +1,52 @@ package ca.uhn.fhir.jpa.bulk.api; +import org.hl7.fhir.instance.model.api.IIdType; + import java.util.Date; import java.util.Set; public class BulkDataExportOptions { - private final String myOutputFormat; - private final Set myResourceTypes; - private final Date mySince; - private final Set myFilters; - private boolean mySystemLevel; + public BulkDataExportOptions() { - public BulkDataExportOptions(String theOutputFormat, Set theResourceTypes, Date theSince, Set theFilters, boolean theSystemLevel) { + } + + public enum ExportStyle { + PATIENT, + GROUP, + SYSTEM + } + private String myOutputFormat; + private Set myResourceTypes; + private Date mySince; + private Set myFilters; + private ExportStyle myExportStyle; + private boolean myExpandMdm; + private IIdType myGroupId; + + + + public void setOutputFormat(String theOutputFormat) { myOutputFormat = theOutputFormat; + } + + public void setResourceTypes(Set theResourceTypes) { myResourceTypes = theResourceTypes; + } + + public void setSince(Date theSince) { mySince = theSince; + } + + public void setFilters(Set theFilters) { myFilters = theFilters; - mySystemLevel = theSystemLevel; + } + + public ExportStyle getExportStyle() { + return myExportStyle; + } + + public void setExportStyle(ExportStyle theExportStyle) { + myExportStyle = theExportStyle; } public String getOutputFormat() { @@ -34,7 +65,19 @@ public class BulkDataExportOptions { return myFilters; } - public boolean isSystemLevel() { - return mySystemLevel; + public boolean isExpandMdm() { + return myExpandMdm; + } + + public void setExpandMdm(boolean theExpandMdm) { + myExpandMdm = theExpandMdm; + } + + public IIdType getGroupId() { + return myGroupId; + } + + public void setGroupId(IIdType theGroupId) { + myGroupId = theGroupId; } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/GroupBulkDataExportOptions.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/GroupBulkDataExportOptions.java deleted file mode 100644 index 2718f4e461c..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/GroupBulkDataExportOptions.java +++ /dev/null @@ -1,25 +0,0 @@ -package ca.uhn.fhir.jpa.bulk.api; - -import org.hl7.fhir.instance.model.api.IIdType; - -import java.util.Date; -import java.util.Set; - -public class GroupBulkDataExportOptions extends BulkDataExportOptions { - private final IIdType myGroupId; - private final 
boolean myMdm; - - public GroupBulkDataExportOptions(String theOutputFormat, Set theResourceTypes, Date theSince, Set theFilters, IIdType theGroupId, boolean theMdm) { - super(theOutputFormat, theResourceTypes, theSince, theFilters, false); - myGroupId = theGroupId; - myMdm = theMdm; - } - - public IIdType getGroupId() { - return myGroupId; - } - - public boolean isMdm() { - return myMdm; - } -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java index b759571561f..c0b41d4090f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java @@ -28,6 +28,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.stream.Collectors; public abstract class BaseBulkItemReader implements ItemReader> { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); @@ -80,25 +81,26 @@ public abstract class BaseBulkItemReader implements ItemReader getResourcePidIterator(); - protected SearchParameterMap createSearchParameterMapForJob() { + protected List createSearchParameterMapsForResourceType() { BulkExportJobEntity jobEntity = getJobEntity(); RuntimeResourceDefinition theDef = getResourceDefinition(); - SearchParameterMap map = new SearchParameterMap(); Map requestUrl = UrlUtil.parseQueryStrings(jobEntity.getRequest()); String[] typeFilters = requestUrl.get(JpaConstants.PARAM_EXPORT_TYPE_FILTER); if (typeFilters != null) { - //TODO GGG START HERE - Optional filter = Arrays.stream(typeFilters).filter(t -> t.startsWith(myResourceType + "?")).findFirst(); - if (filter.isPresent()) { - String matchUrl = filter.get(); - map = myMatchUrlService.translateMatchUrl(matchUrl, theDef); - } + List maps = Arrays.stream(typeFilters) + .filter(typeFilter -> typeFilter.startsWith(myResourceType + "?")) + .map(filter -> buildSearchParameterMapForTypeFilter(filter, theDef)) + .collect(Collectors.toList()); + return maps; } - if (jobEntity.getSince() != null) { - map.setLastUpdated(new DateRangeParam(jobEntity.getSince(), null)); + } + public SearchParameterMap buildSearchParameterMapForTypeFilter(String theFilter, RuntimeResourceDefinition theDef) { + SearchParameterMap searchParameterMap = myMatchUrlService.translateMatchUrl(theFilter, theDef); + if (getJobEntity().getSince() != null) { + searchParameterMap.setLastUpdated(new DateRangeParam(getJobEntity().getSince(), null)); } - map.setLoadSynchronous(true); - return map; + searchParameterMap.setLoadSynchronous(true); + } protected RuntimeResourceDefinition getResourceDefinition() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParametersBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParametersBuilder.java index d9bc48b2b2a..7219c900c87 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParametersBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkExportJobParametersBuilder.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ +import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; import ca.uhn.fhir.rest.api.Constants; import org.springframework.batch.core.JobParametersBuilder; @@ -65,4 +66,8 @@ public class BulkExportJobParametersBuilder extends 
JobParametersBuilder { this.addLong("readChunkSize", theReadChunkSize); return this; } + public BulkExportJobParametersBuilder setExportStyle(BulkDataExportOptions.ExportStyle theExportStyle) { + this.addString("exportStyle", theExportStyle.name()); + return this; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java index a88d1a2f648..7075ad7105b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java @@ -20,30 +20,18 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.batch.log.Logs; import ca.uhn.fhir.jpa.dao.IResultIterator; import ca.uhn.fhir.jpa.dao.ISearchBuilder; -import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; -import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; -import ca.uhn.fhir.jpa.model.util.JpaConstants; -import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; -import ca.uhn.fhir.rest.param.DateRangeParam; -import ca.uhn.fhir.util.UrlUtil; import org.slf4j.Logger; -import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; -import java.util.Arrays; import java.util.Iterator; import java.util.List; -import java.util.Map; -import java.util.Optional; /** * Basic Bulk Export implementation which simply reads all type filters and applies them, along with the _since param @@ -58,7 +46,7 @@ public class BulkItemReader extends BaseBulkItemReader { ourLog.info("Bulk export assembling export of type {} for job {}", myResourceType, myJobUUID); - SearchParameterMap map = createSearchParameterMapForJob(); + SearchParameterMap map = createSearchParameterMapsForResourceType(); ISearchBuilder sb = getSearchBuilderForLocalResourceType(); IResultIterator myResultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java index b6466e7532c..74e85d2188a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java @@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.bulk.job; import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; +import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.Constants; import org.apache.commons.lang3.StringUtils; import org.springframework.batch.core.StepContribution; @@ -66,7 +67,20 @@ public class CreateBulkExportEntityTasklet implements Tasklet { outputFormat = Constants.CT_FHIR_NDJSON; } - IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypeSet, since, filterSet, true)); + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + 
bulkDataExportOptions.setOutputFormat(outputFormat); + bulkDataExportOptions.setResourceTypes(resourceTypeSet); + bulkDataExportOptions.setSince(since); + bulkDataExportOptions.setFilters(filterSet); + //Set export style + String exportStyle = (String)jobParameters.get("exportStyle"); + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.valueOf(exportStyle)); + + //Set group id if present + String groupId = (String)jobParameters.get("groupId"); + bulkDataExportOptions.setGroupId(new IdDt(groupId)); + + IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(bulkDataExportOptions); addUUIDToJobContext(theChunkContext, jobInfo.getJobId()); return RepeatStatus.FINISHED; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java index 482d3f773ee..ffa06a29723 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.batch.log.Logs; @@ -39,7 +38,6 @@ import ca.uhn.fhir.rest.param.ReferenceParam; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.springframework.batch.item.ItemReader; import org.springframework.beans.factory.annotation.Autowired; @@ -176,7 +174,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade private void queryResourceTypeWithReferencesToPatients(List myReadPids, List idChunk) { //Build SP map //First, inject the _typeFilters and _since from the export job - SearchParameterMap expandedSpMap = createSearchParameterMapForJob(); + SearchParameterMap expandedSpMap = createSearchParameterMapsForResourceType(); //Since we are in a bulk job, we have to ensure the user didn't jam in a patient search param, since we need to manually set that. 
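	// Illustrative note (not part of the patch): a client-supplied _typeFilter such as
	// "Immunization?patient=Patient/123" would collide with the patient reference this reader injects
	// for each chunk of group-member IDs, so the validation below is expected to reject such a filter
	// up front rather than silently override it.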
validateSearchParameters(expandedSpMap); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java index c470b7f77c4..699b426d28d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java @@ -20,9 +20,9 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ -import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.batch.log.Logs; import ca.uhn.fhir.jpa.dao.IResultIterator; import ca.uhn.fhir.jpa.dao.ISearchBuilder; @@ -30,9 +30,9 @@ import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.param.ReferenceParam; -import ca.uhn.fhir.rest.param.StringParam; import org.slf4j.Logger; import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; import java.util.Iterator; @@ -46,6 +46,9 @@ import java.util.List; * 2. Search for anything that has `patient-compartment-search-param:missing=false` */ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemReader> { + @Autowired + private DaoConfig myDaoConfig; + private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); @@ -56,12 +59,19 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea } return runtimeSearchParam; } + @Override Iterator getResourcePidIterator() { + if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) { + String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. It must be enabled for Patient Bulk Export"; + ourLog.error(errorMessage); + throw new IllegalStateException(errorMessage); + } + List myReadPids = new ArrayList<>(); //use _typeFilter and _since and all those fancy bits and bobs to generate our basic SP map. - SearchParameterMap map = createSearchParameterMapForJob(); + SearchParameterMap map = createSearchParameterMapsForResourceType(); String patientSearchParam = getPatientSearchParamForCurrentResourceType().getName(); @@ -72,7 +82,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea if (!myResourceType.equalsIgnoreCase("Patient")) { //Now that we have our basic built Bulk Export SP map, we inject the condition that the resources returned //must have a patient= or subject= reference set. 
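	// Illustrative note (not part of the patch): a reference param with setMissing(false) translates to a
	// query like Observation?subject:missing=false, i.e. "only resources whose patient/subject reference
	// is populated"; setMissing(true) means the opposite, which is why the flag is flipped below. This is
	// also why the reader above refuses to run unless "Index Missing Fields" is enabled in DaoConfig,
	// since :missing searches depend on that index.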
- map.add(patientSearchParam, new ReferenceParam().setMissing(true)); + map.add(patientSearchParam, new ReferenceParam().setMissing(false)); } ISearchBuilder sb = getSearchBuilderForLocalResourceType(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index c2f87b60775..5ff7be95bb8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.bulk.provider; */ import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.GroupBulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson; import ca.uhn.fhir.jpa.model.util.JpaConstants; @@ -60,6 +60,8 @@ public class BulkDataExportProvider { private IBulkDataExportSvc myBulkDataExportSvc; @Autowired private FhirContext myFhirContext; + @Autowired + private DaoConfig myDaoConfig; @VisibleForTesting public void setFhirContextForUnitTest(FhirContext theFhirContext) { @@ -77,56 +79,57 @@ public class BulkDataExportProvider { @Operation(name = JpaConstants.OPERATION_EXPORT, global = false /* set to true once we can handle this */, manualResponse = true, idempotent = true) public void export( @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_STYLE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, ServletRequestDetails theRequestDetails ) { - String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER); - PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader); - if (prefer.getRespondAsync() == false) { - throw new InvalidRequestException("Must request async processing for $export"); - } - - String outputFormat = theOutputFormat != null ? theOutputFormat.getValueAsString() : null; - - Set resourceTypes = null; - if (theType != null) { - resourceTypes = ArrayUtil.commaSeparatedListToCleanSet(theType.getValueAsString()); - } - - Date since = null; - if (theSince != null) { - since = theSince.getValue(); - } - Set typeFilters = splitTypeFilters(theTypeFilter); - - IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypes, since, typeFilters, true)); - - String serverBase = getServerBase(theRequestDetails); - String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" 
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + outcome.getJobId(); - - HttpServletResponse response = theRequestDetails.getServletResponse(); - - // Add standard headers - theRequestDetails.getServer().addHeadersToResponse(response); - - // Successful 202 Accepted - response.addHeader(Constants.HEADER_CONTENT_LOCATION, pollLocation); - response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED); + validatePreferAsyncHeader(theRequestDetails); + BulkDataExportOptions bulkDataExportOptions = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter); + IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + writePollingLocationToResponseHeaders(theRequestDetails, outcome); } - private Set splitTypeFilters(IPrimitiveType theTypeFilter) { - if (theTypeFilter== null) { - return null; - } - String typeFilterSring = theTypeFilter.getValueAsString(); - String[] typeFilters = typeFilterSring.split(FARM_TO_TABLE_TYPE_FILTER_REGEX); - if (typeFilters == null || typeFilters.length == 0) { - return null; - } - return new HashSet<>(Arrays.asList(typeFilters)); + + private String getServerBase(ServletRequestDetails theRequestDetails) { + return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/"); + } + + /** + * Group/Id/$export + */ + @Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Group") + public void groupExport( + @IdParam IIdType theIdParam, + @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, + @OperationParam(name = JpaConstants.PARAM_EXPORT_STYLE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, + @OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") IPrimitiveType theMdm, + ServletRequestDetails theRequestDetails + ) { + validatePreferAsyncHeader(theRequestDetails); + BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm); + IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + writePollingLocationToResponseHeaders(theRequestDetails, outcome); + } + + /** + * Patient/$export + */ + @Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Patient") + public void patientExport( + @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, + @OperationParam(name = JpaConstants.PARAM_EXPORT_STYLE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, + ServletRequestDetails theRequestDetails + ) { + validatePreferAsyncHeader(theRequestDetails); + BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter); + IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + 
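	// Illustrative note (not part of the patch): the operation body itself stays empty. Roughly, the helper
	// below answers 202 Accepted and sets a Content-Location header of the form
	// [base]/$export-poll-status?_jobId=<job id>, which clients poll until the export completes.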
writePollingLocationToResponseHeaders(theRequestDetails, outcome); } /** @@ -183,37 +186,31 @@ public class BulkDataExportProvider { OperationOutcomeUtil.addIssue(myFhirContext, oo, "error", status.getStatusMessage(), null, null); myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(oo, response.getWriter()); response.getWriter().close(); - } - - } - private String getServerBase(ServletRequestDetails theRequestDetails) { - return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/"); + private BulkDataExportOptions buildSystemBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, IPrimitiveType theTypeFilter) { + return buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.SYSTEM); } - /** - * Group/Id/$export - */ - @Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Group") - public void groupExport( - @IdParam IIdType theIdParam, - @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, - @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, - @OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") IPrimitiveType theMdm, + private BulkDataExportOptions buildGroupBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, IPrimitiveType theTypeFilter, IIdType theGroupId, IPrimitiveType theExpandMdm) { + BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.GROUP); + bulkDataExportOptions.setGroupId(theGroupId); - ServletRequestDetails theRequestDetails - ) { - - String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER); - PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader); - if (prefer.getRespondAsync() == false) { - throw new InvalidRequestException("Must request async processing for $export"); + boolean mdm = false; + if (theExpandMdm != null) { + mdm = theExpandMdm.getValue(); } + bulkDataExportOptions.setExpandMdm(mdm); + return bulkDataExportOptions; + } + + private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, IPrimitiveType theTypeFilter) { + return buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.PATIENT); + } + + private BulkDataExportOptions buildBulkDataExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, IPrimitiveType theTypeFilter, BulkDataExportOptions.ExportStyle theExportStyle) { String outputFormat = theOutputFormat != null ? 
theOutputFormat.getValueAsString() : null; Set resourceTypes = null; @@ -226,16 +223,20 @@ public class BulkDataExportProvider { since = theSince.getValue(); } - boolean mdm = false; - if (theMdm != null) { - mdm = theMdm.getValue(); - } Set typeFilters = splitTypeFilters(theTypeFilter); - IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new GroupBulkDataExportOptions(outputFormat, resourceTypes, since, typeFilters, theIdParam, mdm)); + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + bulkDataExportOptions.setFilters(typeFilters); + bulkDataExportOptions.setExportStyle(theExportStyle); + bulkDataExportOptions.setSince(since); + bulkDataExportOptions.setResourceTypes(resourceTypes); + bulkDataExportOptions.setOutputFormat(outputFormat); + return bulkDataExportOptions; + } + public void writePollingLocationToResponseHeaders(ServletRequestDetails theRequestDetails, IBulkDataExportSvc.JobInfo theOutcome) { String serverBase = getServerBase(theRequestDetails); - String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + outcome.getJobId(); + String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + theOutcome.getJobId(); HttpServletResponse response = theRequestDetails.getServletResponse(); @@ -246,4 +247,26 @@ public class BulkDataExportProvider { response.addHeader(Constants.HEADER_CONTENT_LOCATION, pollLocation); response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED); } + + private void validatePreferAsyncHeader(ServletRequestDetails theRequestDetails) { + String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER); + PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader); + if (prefer.getRespondAsync() == false) { + throw new InvalidRequestException("Must request async processing for $export"); + } + } + + private Set splitTypeFilters(IPrimitiveType theTypeFilter) { + if (theTypeFilter== null) { + return null; + } + String typeFilterSring = theTypeFilter.getValueAsString(); + String[] typeFilters = typeFilterSring.split(FARM_TO_TABLE_TYPE_FILTER_REGEX); + if (typeFilters == null || typeFilters.length == 0) { + return null; + } + + return new HashSet<>(Arrays.asList(typeFilters)); + } + } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java index 55d135412cf..8bcb29a56f2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java @@ -27,7 +27,6 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.GroupBulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; @@ -45,16 +44,13 @@ import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.util.UrlUtil; -import org.apache.commons.lang3.StringUtils; import 
org.apache.commons.lang3.time.DateUtils; import org.hl7.fhir.instance.model.api.IBaseBinary; import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.r4.model.Group; import org.hl7.fhir.r4.model.InstantType; import org.quartz.JobExecutionContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersBuilder; import org.springframework.batch.core.JobParametersInvalidException; import org.springframework.beans.factory.annotation.Autowired; @@ -67,7 +63,6 @@ import org.springframework.transaction.support.TransactionTemplate; import javax.annotation.PostConstruct; import javax.transaction.Transactional; -import java.util.Arrays; import java.util.Date; import java.util.HashSet; import java.util.Map; @@ -78,7 +73,6 @@ import java.util.stream.Collectors; import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam; import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams; -import static org.apache.commons.lang3.StringUtils.contains; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class BulkDataExportSvcImpl implements IBulkDataExportSvc { @@ -245,8 +239,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { } private boolean isPatientBulkJob(BulkExportJobEntity theBulkExportJobEntity) { - //TODO GGG - return true; + return theBulkExportJobEntity.getRequest().startsWith("/Patient/"); } private void enhanceBulkParametersWithGroupParameters(BulkExportJobEntity theBulkExportJobEntity, JobParametersBuilder theParameters) { @@ -257,7 +250,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { } private boolean isGroupBulkJob(BulkExportJobEntity theBulkExportJobEntity) { - return getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_GROUP_ID) != null; + String queryParameterIfPresent = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_GROUP_ID); + return queryParameterIfPresent != null; } @SuppressWarnings("unchecked") @@ -296,11 +290,14 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { StringBuilder requestBuilder = new StringBuilder(); requestBuilder.append("/"); - //Prefix the export url with Group/[id]/ - if (theBulkDataExportOptions instanceof GroupBulkDataExportOptions) { - requestBuilder.append(((GroupBulkDataExportOptions)theBulkDataExportOptions).getGroupId()).append("/"); + //Prefix the export url with Group/[id]/ or /Patient/ depending on what type of request it is. 
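	// Illustrative note (not part of the patch): the persisted request strings built here look roughly like
	//   GROUP   -> /Group/123/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Immunization&_groupId=Group/123&_mdm=true
	//   PATIENT -> /Patient/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Immunization
	//   SYSTEM  -> /$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Patient,Observation
	// (the group id, resource types and parameter ordering above are examples only)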
+ if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.GROUP)) { + requestBuilder.append(theBulkDataExportOptions.getGroupId().toVersionless()).append("/"); + } else if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.PATIENT)) { + requestBuilder.append("Patient/"); } + requestBuilder.append(JpaConstants.OPERATION_EXPORT); requestBuilder.append("?").append(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT).append("=").append(escapeUrlParam(outputFormat)); Set resourceTypes = theBulkDataExportOptions.getResourceTypes(); @@ -315,13 +312,13 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { theBulkDataExportOptions.getFilters().stream() .forEach(filter -> requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(escapeUrlParam(filter))); } - if (theBulkDataExportOptions instanceof GroupBulkDataExportOptions) { - GroupBulkDataExportOptions groupOptions = (GroupBulkDataExportOptions) theBulkDataExportOptions; - requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_GROUP_ID).append("=").append(groupOptions.getGroupId().getValue()); - requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_MDM).append("=").append(groupOptions.isMdm()); + + if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.GROUP)) { + requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_GROUP_ID).append("=").append(theBulkDataExportOptions.getGroupId().getValue()); + requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_MDM).append("=").append(theBulkDataExportOptions.isExpandMdm()); } - requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_SYSTEM_LEVEL).append("=").append(theBulkDataExportOptions.isSystemLevel()); + String request = requestBuilder.toString(); @@ -399,7 +396,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { } String resourceType = next.substring(0, next.indexOf("?")); if (!theResourceTypes.contains(resourceType)) { - throw new InvalidRequestException("Invalid " + JpaConstants.PARAM_EXPORT_TYPE_FILTER + " value \"" + next + "\". Resource type does not appear in " + JpaConstants.PARAM_EXPORT_TYPE + " list"); + throw new InvalidRequestException("Invalid " + JpaConstants.PARAM_EXPORT_TYPE_FILTER + " value \"" + next + "\". Resource type does not appear in " + JpaConstants.PARAM_EXPORT_TYPE+ " list"); } if (!types.add(resourceType)) { throw new InvalidRequestException("Invalid " + JpaConstants.PARAM_EXPORT_TYPE_FILTER + " value \"" + next + "\". 
Multiple filters found for type " + resourceType); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 484c2da955f..9914c83d0b0 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -3,7 +3,6 @@ package ca.uhn.fhir.jpa.bulk; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.GroupBulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson; import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; @@ -27,7 +26,6 @@ import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletHandler; import org.eclipse.jetty.servlet.ServletHolder; -import org.hl7.fhir.r4.model.BooleanType; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.InstantType; import org.hl7.fhir.r4.model.Parameters; @@ -78,8 +76,6 @@ public class BulkDataExportProviderTest { private CloseableHttpClient myClient; @Captor private ArgumentCaptor myBulkDataExportOptionsCaptor; - @Captor - private ArgumentCaptor myGroupBulkDataExportOptionsCaptor; @AfterEach public void after() throws Exception { @@ -122,7 +118,7 @@ public class BulkDataExportProviderTest { Parameters input = new Parameters(); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); - input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner")); + input.addParameter(JpaConstants.PARAM_EXPORT_STYLE, new StringType("Patient, Practitioner")); input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Patient?identifier=foo")); @@ -159,7 +155,7 @@ public class BulkDataExportProviderTest { String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + "?" 
+ JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) - + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner") + + "&" + JpaConstants.PARAM_EXPORT_STYLE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner") + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()) + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?identifier=foo"); @@ -311,7 +307,7 @@ public class BulkDataExportProviderTest { Parameters input = new Parameters(); StringType obsTypeFilter = new StringType("Observation?code=OBSCODE,DiagnosticReport?code=DRCODE"); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); - input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Observation, DiagnosticReport")); + input.addParameter(JpaConstants.PARAM_EXPORT_STYLE, new StringType("Observation, DiagnosticReport")); input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); input.addParameter(JpaConstants.PARAM_EXPORT_MDM, true); input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, obsTypeFilter); @@ -329,14 +325,14 @@ public class BulkDataExportProviderTest { assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } - verify(myBulkDataExportSvc, times(1)).submitJob(myGroupBulkDataExportOptionsCaptor.capture()); - GroupBulkDataExportOptions options = myGroupBulkDataExportOptionsCaptor.getValue(); + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(options.getResourceTypes(), containsInAnyOrder("Observation", "DiagnosticReport")); assertThat(options.getSince(), notNullValue()); assertThat(options.getFilters(), notNullValue()); assertEquals(GROUP_ID, options.getGroupId().getValue()); - assertThat(options.isMdm(), is(equalTo(true))); + assertThat(options.isExpandMdm(), is(equalTo(true))); } @Test @@ -350,7 +346,7 @@ public class BulkDataExportProviderTest { String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + "?" 
+ JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) - + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Immunization, Observation") + + "&" + JpaConstants.PARAM_EXPORT_STYLE + "=" + UrlUtil.escapeUrlParam("Immunization, Observation") + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()); String immunizationTypeFilter1 = "Immunization?patient.identifier=SC378274-MRN|009999997,SC378274-MRN|009999998,SC378274-MRN|009999999&date=2020-01-02"; @@ -389,7 +385,7 @@ public class BulkDataExportProviderTest { Parameters input = new Parameters(); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); - input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner")); + input.addParameter(JpaConstants.PARAM_EXPORT_STYLE, new StringType("Patient, Practitioner")); input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Patient?identifier=foo")); @@ -415,4 +411,41 @@ public class BulkDataExportProviderTest { assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo")); } + @Test + public void testInitiatePatientExportRequest() throws IOException { + + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() + .setJobId(A_JOB_ID); + when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + + InstantType now = InstantType.now(); + + Parameters input = new Parameters(); + input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); + input.addParameter(JpaConstants.PARAM_EXPORT_STYLE, new StringType("Immunization, Observation")); + input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Immunization?vaccine-code=foo")); + + ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); + + HttpPost post = new HttpPost("http://localhost:" + myPort + "/Patient/" + JpaConstants.OPERATION_EXPORT); + post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + post.setEntity(new ResourceEntity(myCtx, input)); + ourLog.info("Request: {}", post); + try (CloseableHttpResponse response = myClient.execute(post)) { + ourLog.info("Response: {}", response.toString()); + + assertEquals(202, response.getStatusLine().getStatusCode()); + assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); + assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + } + + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); + assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); + assertThat(options.getResourceTypes(), containsInAnyOrder("Immunization", "Observation")); + assertThat(options.getSince(), notNullValue()); + assertThat(options.getFilters(), containsInAnyOrder("Immunization?vaccine-code=foo")); + } + } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 9dbc82b709c..eabb341f602 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java 
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -2,11 +2,11 @@ package ca.uhn.fhir.jpa.bulk; import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; -import ca.uhn.fhir.jpa.bulk.api.GroupBulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.job.BulkExportJobParametersBuilder; import ca.uhn.fhir.jpa.bulk.job.GroupBulkExportJobParametersBuilder; @@ -103,6 +103,14 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { private IIdType myPatientGroupId; + + @Override + public void beforeFlushFT() { + super.beforeFlushFT(); + //This is needed for patient level export. + myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED); + } + @Test public void testPurgeExpiredJobs() { @@ -159,7 +167,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_InvalidOutputFormat() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Patient", "Observation"), null, null, true)); + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setOutputFormat(Constants.CT_FHIR_JSON_NEW); + options.setResourceTypes(Sets.newHashSet("Patient", "Observation")); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + myBulkDataExportSvc.submitJob(options); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid output format: application/fhir+json", e.getMessage()); @@ -169,17 +181,20 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_OnlyBinarySelected() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Binary"), null, null, true)); + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setResourceTypes(Sets.newHashSet("Binary")); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + myBulkDataExportSvc.submitJob(options); fail(); } catch (InvalidRequestException e) { - assertEquals("Invalid output format: application/fhir+json", e.getMessage()); + assertEquals("Binary resources may not be exported with bulk export", e.getMessage()); } } @Test public void testSubmit_InvalidResourceTypes() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient", "FOO"), null, null, true)); + myBulkDataExportSvc.submitJob(buildBulkDataForResourceTypes(Sets.newHashSet("Patient", "FOO"))); fail(); } catch (InvalidRequestException e) { assertEquals("Unknown or unsupported resource type: FOO", e.getMessage()); @@ -189,7 +204,12 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_MultipleTypeFiltersForSameType() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Patient?name=a", "Patient?active=true"), true)); + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setOutputFormat(Constants.CT_FHIR_NDJSON); + options.setResourceTypes(Sets.newHashSet("Patient")); + options.setFilters(Sets.newHashSet("Patient?name=a", 
"Patient?active=true")); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + myBulkDataExportSvc.submitJob(options); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid _typeFilter value \"Patient?name=a\". Multiple filters found for type Patient", e.getMessage()); @@ -199,7 +219,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_TypeFilterForNonSelectedType() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Observation?code=123"), true)); + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setResourceTypes(Sets.newHashSet("Patient")); + options.setFilters(Sets.newHashSet("Observation?code=123")); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + myBulkDataExportSvc.submitJob(options); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid _typeFilter value \"Observation?code=123\". Resource type does not appear in _type list", e.getMessage()); @@ -209,22 +233,32 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_TypeFilterInvalid() { try { - myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Hello"), true)); + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setResourceTypes(Sets.newHashSet("Patient")); + options.setFilters(Sets.newHashSet("Hello")); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + myBulkDataExportSvc.submitJob(options); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid _typeFilter value \"Hello\". Must be in the form [ResourceType]?[params]", e.getMessage()); } } + private BulkDataExportOptions buildBulkDataForResourceTypes(Set resourceTypes) { + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setResourceTypes(resourceTypes); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + return options; + } @Test public void testSubmit_ReusesExisting() { // Submit - IBulkDataExportSvc.JobInfo jobDetails1 = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null, true)); + IBulkDataExportSvc.JobInfo jobDetails1 = myBulkDataExportSvc.submitJob(buildBulkDataForResourceTypes(Sets.newHashSet("Patient", "Observation"))); assertNotNull(jobDetails1.getJobId()); // Submit again - IBulkDataExportSvc.JobInfo jobDetails2 = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null, true)); + IBulkDataExportSvc.JobInfo jobDetails2 = myBulkDataExportSvc.submitJob(buildBulkDataForResourceTypes(Sets.newHashSet("Patient", "Observation"))); assertNotNull(jobDetails2.getJobId()); assertEquals(jobDetails1.getJobId(), jobDetails2.getJobId()); @@ -245,7 +279,10 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient"), null, null, true)); + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + bulkDataExportOptions.setResourceTypes(Sets.newHashSet("Patient")); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); 
assertNotNull(jobDetails.getJobId()); // Check the status @@ -275,7 +312,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, Sets.newHashSet(TEST_FILTER), true)); + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setResourceTypes(Sets.newHashSet("Patient", "Observation")); + options.setFilters(Sets.newHashSet(TEST_FILTER)); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options); assertNotNull(jobDetails.getJobId()); // Check the status @@ -304,7 +345,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertEquals(7, nextContents.split("\n").length); // Only female patients } else if ("Observation".equals(next.getResourceType())) { assertThat(nextContents, containsString("\"subject\":{\"reference\":\"Patient/PAT0\"}}\n")); - assertEquals(16, nextContents.split("\n").length); + assertEquals(26, nextContents.split("\n").length); } else { fail(next.getResourceType()); } @@ -328,7 +369,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { myBinaryDao.create(b); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, null, null, null, true)); + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); assertNotNull(jobDetails.getJobId()); // Check the status @@ -357,10 +400,10 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertEquals(17, nextContents.split("\n").length); } else if ("Observation".equals(next.getResourceType())) { assertThat(nextContents, containsString("\"subject\":{\"reference\":\"Patient/PAT0\"}}\n")); - assertEquals(16, nextContents.split("\n").length); + assertEquals(26, nextContents.split("\n").length); }else if ("Immunization".equals(next.getResourceType())) { assertThat(nextContents, containsString("\"patient\":{\"reference\":\"Patient/PAT0\"}}\n")); - assertEquals(16, nextContents.split("\n").length); + assertEquals(26, nextContents.split("\n").length); } else if ("CareTeam".equals(next.getResourceType())) { assertThat(nextContents, containsString("\"id\":\"CT0\"")); assertEquals(16, nextContents.split("\n").length); @@ -382,7 +425,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Create a bulk job HashSet types = Sets.newHashSet("Patient"); Set typeFilters = Sets.newHashSet("Patient?_has:Observation:patient:identifier=SYS|VAL3"); - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, types, null, typeFilters, true)); + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + options.setResourceTypes(types); + options.setFilters(typeFilters); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options); assertNotNull(jobDetails.getJobId()); // Check the status @@ -434,7 +481,12 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, 
Sets.newHashSet("Patient", "Observation"), cutoff.getValue(), null, true)); + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setResourceTypes(Sets.newHashSet("Patient", "Observation")); + options.setSince(cutoff.getValue()); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options); assertNotNull(jobDetails.getJobId()); // Check the status @@ -482,6 +534,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { paramBuilder .setReadChunkSize(100L) .setOutputFormat(Constants.CT_FHIR_NDJSON) + .setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM) .setResourceTypes(Arrays.asList("Patient", "Observation")); JobExecution jobExecution = myBatchJobSubmitter.runJob(myBulkJob, paramBuilder.toJobParameters()); @@ -496,6 +549,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { public void awaitAllBulkJobCompletions() { List bulkExport = myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.BULK_EXPORT_JOB_NAME, 0, 100); + bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME, 0, 100)); bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME, 0, 100)); if (bulkExport.isEmpty()) { fail("There are no bulk export jobs running!"); @@ -513,7 +567,10 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null, true)); + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + options.setResourceTypes(Sets.newHashSet("Patient", "Observation")); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options); //Add the UUID to the job BulkExportJobParametersBuilder paramBuilder = new BulkExportJobParametersBuilder() @@ -535,17 +592,20 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new GroupBulkDataExportOptions(null, Sets.newHashSet("Immunization"), null, null, myPatientGroupId, true)); + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + bulkDataExportOptions.setOutputFormat(null); + bulkDataExportOptions.setResourceTypes(Sets.newHashSet("Immunization")); + bulkDataExportOptions.setSince(null); + bulkDataExportOptions.setFilters(null); + bulkDataExportOptions.setGroupId(myPatientGroupId); + bulkDataExportOptions.setExpandMdm(true); + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); - GroupBulkExportJobParametersBuilder paramBuilder = new GroupBulkExportJobParametersBuilder(); - paramBuilder.setGroupId(myPatientGroupId.getIdPart()); - paramBuilder.setJobUUID(jobDetails.getJobId()); - paramBuilder.setReadChunkSize(10L); + myBulkDataExportSvc.buildExportFiles(); + awaitAllBulkJobCompletions(); - JobExecution jobExecution = myBatchJobSubmitter.runJob(myGroupBulkJob, paramBuilder.toJobParameters()); - - awaitJobCompletion(jobExecution); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); assertThat(jobInfo.getStatus(), 
equalTo(BulkJobStatusEnum.COMPLETE)); @@ -568,10 +628,14 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testPatientLevelExportWorks() throws JobParametersInvalidException { + myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED); createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Immunization", "Observation"), null, null, false)); + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setResourceTypes(Sets.newHashSet("Immunization", "Observation")); + options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options); GroupBulkExportJobParametersBuilder paramBuilder = new GroupBulkExportJobParametersBuilder(); paramBuilder.setGroupId(myPatientGroupId.getIdPart()); @@ -620,17 +684,27 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { public void testGroupBatchJobCareTeam() throws Exception { createResources(); + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + bulkDataExportOptions.setOutputFormat(null); + bulkDataExportOptions.setResourceTypes(Sets.newHashSet("CareTeam")); + bulkDataExportOptions.setSince(null); + bulkDataExportOptions.setFilters(null); + bulkDataExportOptions.setGroupId(myPatientGroupId); + bulkDataExportOptions.setExpandMdm(true); + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new GroupBulkDataExportOptions(null, Sets.newHashSet("CareTeam"), null, null, myPatientGroupId, true)); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); - GroupBulkExportJobParametersBuilder paramBuilder = new GroupBulkExportJobParametersBuilder(); - paramBuilder.setGroupId(myPatientGroupId.getIdPart()); - paramBuilder.setJobUUID(jobDetails.getJobId()); - paramBuilder.setReadChunkSize(10L); +// GroupBulkExportJobParametersBuilder paramBuilder = new GroupBulkExportJobParametersBuilder(); +// paramBuilder.setGroupId(myPatientGroupId.getIdPart()); +// paramBuilder.setJobUUID(jobDetails.getJobId()); +// paramBuilder.setReadChunkSize(10L); +// +// JobExecution jobExecution = myBatchJobSubmitter.runJob(myGroupBulkJob, paramBuilder.toJobParameters()); + myBulkDataExportSvc.buildExportFiles(); - JobExecution jobExecution = myBatchJobSubmitter.runJob(myGroupBulkJob, paramBuilder.toJobParameters()); - - awaitJobCompletion(jobExecution); + awaitAllBulkJobCompletions(); +// awaitJobCompletion(jobExecution); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); @@ -669,7 +743,15 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new GroupBulkDataExportOptions(null, Sets.newHashSet("Patient"), null, null, myPatientGroupId, true)); + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + bulkDataExportOptions.setOutputFormat(null); + bulkDataExportOptions.setResourceTypes(Sets.newHashSet("Patient")); + bulkDataExportOptions.setSince(null); + bulkDataExportOptions.setFilters(null); + bulkDataExportOptions.setGroupId(myPatientGroupId); + 
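[Editor's note] The group-style variants of these tests carry two extra pieces of state on the same options object: the group id and the MDM expansion flag. A hedged, self-contained sketch follows; the Group/G0 id is the one asserted against the request URL later in this class and is used here purely as example data.

import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
import com.google.common.collect.Sets;
import org.hl7.fhir.r4.model.IdType;

public class GroupExportOptionsExample {
    // Group-level export of Immunization for the members of Group/G0,
    // with MDM expansion of the membership switched on.
    public static BulkDataExportOptions newGroupOptions() {
        BulkDataExportOptions options = new BulkDataExportOptions();
        options.setResourceTypes(Sets.newHashSet("Immunization"));
        options.setGroupId(new IdType("Group/G0"));
        options.setExpandMdm(true);
        options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
        return options;
    }
}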
bulkDataExportOptions.setExpandMdm(true); + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); myBulkDataExportSvc.buildExportFiles(); awaitAllBulkJobCompletions(); @@ -694,13 +776,21 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new GroupBulkDataExportOptions(null, Sets.newHashSet("Immunization", "Observation"), null, null, myPatientGroupId, true)); + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + bulkDataExportOptions.setOutputFormat(null); + bulkDataExportOptions.setResourceTypes(Sets.newHashSet("Immunization", "Observation")); + bulkDataExportOptions.setSince(null); + bulkDataExportOptions.setFilters(null); + bulkDataExportOptions.setGroupId(myPatientGroupId); + bulkDataExportOptions.setExpandMdm(true); + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); myBulkDataExportSvc.buildExportFiles(); awaitAllBulkJobCompletions(); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); - assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId +"&_mdm=true", jobInfo.getRequest()); + assertEquals("/Group/G0/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId +"&_mdm=true", jobInfo.getRequest()); assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getFiles().size(), equalTo(2)); @@ -750,23 +840,24 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testGroupBulkExportSupportsTypeFilters() throws JobParametersInvalidException { createResources(); - Set filters = new HashSet<>(); //Only get COVID-19 vaccinations + Set filters = new HashSet<>(); filters.add("Immunization?vaccine-code=vaccines|COVID-19"); - GroupBulkDataExportOptions groupBulkDataExportOptions = new GroupBulkDataExportOptions(null, Sets.newHashSet("Immunization"), null, filters, myPatientGroupId, true ); - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(groupBulkDataExportOptions); + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + bulkDataExportOptions.setOutputFormat(null); + bulkDataExportOptions.setResourceTypes(Sets.newHashSet("Immunization")); + bulkDataExportOptions.setSince(null); + bulkDataExportOptions.setFilters(filters); + bulkDataExportOptions.setGroupId(myPatientGroupId); + bulkDataExportOptions.setExpandMdm(true); + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); - GroupBulkExportJobParametersBuilder paramBuilder = new GroupBulkExportJobParametersBuilder(); - paramBuilder.setGroupId(myPatientGroupId.getIdPart()); - paramBuilder.setMdm(true); - paramBuilder.setJobUUID(jobDetails.getJobId()); - paramBuilder.setReadChunkSize(10L); + myBulkDataExportSvc.buildExportFiles(); + awaitAllBulkJobCompletions(); - JobExecution jobExecution = myBatchJobSubmitter.runJob(myGroupBulkJob, paramBuilder.toJobParameters()); - - awaitJobCompletion(jobExecution); IBulkDataExportSvc.JobInfo jobInfo = 
myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java index 6eaa3bf9027..3e9bbdd8c77 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java @@ -162,6 +162,7 @@ import org.hl7.fhir.r4.model.ValueSet; import org.hl7.fhir.r5.utils.IResourceValidator; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; @@ -542,12 +543,11 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil runInTransaction(() -> { SearchSession searchSession = Search.session(myEntityManager); searchSession.workspace(ResourceTable.class).purge(); -// searchSession.workspace(ResourceIndexedSearchParamString.class).purge(); searchSession.indexingPlan().execute(); }); myDaoConfig.setSchedulingDisabled(true); - myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields()); + myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED); } @AfterEach diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java index 4b5d9ebb57f..a61325fb42e 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java @@ -195,9 +195,9 @@ public class JpaConstants { public static final String PARAM_EXPORT_MDM = "_mdm"; /** - * Whether the Export is for System level or Patient Level. 
+ * the Type of the export */ - public static final String PARAM_EXPORT_SYSTEM_LEVEL = "_system"; + public static final String PARAM_EXPORT_STYLE = "_exportStyle"; /** * Parameter for delete to indicate the deleted resources should also be expunged From 1a1a5ee6613576ea19aada3ad3321eb41a302ee7 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Tue, 9 Mar 2021 12:42:39 -0500 Subject: [PATCH 08/22] WIP supporting multiple type filters --- .../fhir/jpa/bulk/job/BaseBulkItemReader.java | 20 ++- .../uhn/fhir/jpa/bulk/job/BulkItemReader.java | 16 ++- .../jpa/bulk/job/GroupBulkItemReader.java | 29 ++-- .../jpa/bulk/job/PatientBulkItemReader.java | 33 ++--- .../jpa/bulk/svc/BulkDataExportSvcImpl.java | 4 - .../jpa/bulk/BulkDataExportProviderTest.java | 12 +- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 124 +++++++++++++----- 7 files changed, 152 insertions(+), 86 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java index c0b41d4090f..278095ee393 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java @@ -24,6 +24,7 @@ import org.springframework.beans.factory.annotation.Value; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -92,15 +93,24 @@ public abstract class BaseBulkItemReader implements ItemReader buildSearchParameterMapForTypeFilter(filter, theDef)) .collect(Collectors.toList()); return maps; + } else { + SearchParameterMap map = new SearchParameterMap(); + enhanceSearchParameterMapWithCommonParameters(map); + return Collections.singletonList(map); } } + + private void enhanceSearchParameterMapWithCommonParameters(SearchParameterMap map) { + map.setLoadSynchronous(true); + if (getJobEntity().getSince() != null) { + map.setLastUpdated(new DateRangeParam(getJobEntity().getSince(), null)); + } + } + public SearchParameterMap buildSearchParameterMapForTypeFilter(String theFilter, RuntimeResourceDefinition theDef) { SearchParameterMap searchParameterMap = myMatchUrlService.translateMatchUrl(theFilter, theDef); - if (getJobEntity().getSince() != null) { - searchParameterMap.setLastUpdated(new DateRangeParam(getJobEntity().getSince(), null)); - } - searchParameterMap.setLoadSynchronous(true); - + enhanceSearchParameterMapWithCommonParameters(searchParameterMap); + return searchParameterMap; } protected RuntimeResourceDefinition getResourceDefinition() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java index 7075ad7105b..0c9bf4fae2b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BulkItemReader.java @@ -30,8 +30,10 @@ import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import org.slf4j.Logger; import java.util.ArrayList; +import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Set; /** * Basic Bulk Export implementation which simply reads all type filters and applies them, along with the _since param @@ -40,19 +42,21 @@ import java.util.List; public class BulkItemReader extends BaseBulkItemReader { private static 
final Logger ourLog = Logs.getBatchTroubleshootingLog(); - @Override Iterator getResourcePidIterator() { ourLog.info("Bulk export assembling export of type {} for job {}", myResourceType, myJobUUID); + Set myReadPids = new HashSet<>(); - SearchParameterMap map = createSearchParameterMapsForResourceType(); + List map = createSearchParameterMapsForResourceType(); ISearchBuilder sb = getSearchBuilderForLocalResourceType(); - IResultIterator myResultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions()); - List myReadPids = new ArrayList<>(); - while (myResultIterator.hasNext()) { - myReadPids.add(myResultIterator.next()); + for (SearchParameterMap spMap: map) { + ourLog.debug("About to evaluate query {}", spMap.toNormalizedQueryString(myContext)); + IResultIterator myResultIterator = sb.createQuery(spMap, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions()); + while (myResultIterator.hasNext()) { + myReadPids.add(myResultIterator.next()); + } } return myReadPids.iterator(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java index ffa06a29723..1b3e22ce988 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java @@ -27,6 +27,7 @@ import ca.uhn.fhir.jpa.dao.IResultIterator; import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao; import ca.uhn.fhir.jpa.dao.index.IdHelperService; +import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.util.QueryChunker; @@ -79,7 +80,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade @Override Iterator getResourcePidIterator() { - List myReadPids = new ArrayList<>(); + Set myReadPids = new HashSet<>(); //Short circuit out if we detect we are attempting to extract patients if (myResourceType.equalsIgnoreCase("Patient")) { @@ -171,24 +172,26 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade return expandedIds; } - private void queryResourceTypeWithReferencesToPatients(List myReadPids, List idChunk) { + private void queryResourceTypeWithReferencesToPatients(Set myReadPids, List idChunk) { //Build SP map //First, inject the _typeFilters and _since from the export job - SearchParameterMap expandedSpMap = createSearchParameterMapsForResourceType(); + List expandedSpMaps = createSearchParameterMapsForResourceType(); + for (SearchParameterMap expandedSpMap: expandedSpMaps) { - //Since we are in a bulk job, we have to ensure the user didn't jam in a patient search param, since we need to manually set that. - validateSearchParameters(expandedSpMap); + //Since we are in a bulk job, we have to ensure the user didn't jam in a patient search param, since we need to manually set that. + validateSearchParameters(expandedSpMap); - // Now, further filter the query with patient references defined by the chunk of IDs we have. - filterSearchByResourceIds(idChunk, expandedSpMap); + // Now, further filter the query with patient references defined by the chunk of IDs we have. 
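[Editor's note] Across BulkItemReader, GroupBulkItemReader and PatientBulkItemReader the pattern introduced in this patch is the same: one SearchParameterMap per _typeFilter value, one query per map, and the matching pids accumulated into a Set so a resource matched by several filters is only exported once. A stripped-down sketch of that accumulation, with a plain function standing in for the HAPI search machinery (all names here are illustrative):

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;

public class MultiFilterUnionSketch {
    // Stand-in for executing one filter's query and returning matching resource pids.
    // In the real readers this is ISearchBuilder.createQuery(...) over a SearchParameterMap.
    static Set<Long> unionOfFilterResults(List<String> typeFilters, Function<String, List<Long>> runQuery) {
        Set<Long> pids = new LinkedHashSet<>(); // a Set, so overlapping filters do not duplicate output
        for (String filter : typeFilters) {
            pids.addAll(runQuery.apply(filter)); // each filter contributes its own result set
        }
        return pids;
    }
}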
+ filterSearchByResourceIds(idChunk, expandedSpMap); - // Fetch and cache a search builder for this resource type - ISearchBuilder searchBuilder = getSearchBuilderForLocalResourceType(); + // Fetch and cache a search builder for this resource type + ISearchBuilder searchBuilder = getSearchBuilderForLocalResourceType(); - //Execute query and all found pids to our local iterator. - IResultIterator resultIterator = searchBuilder.createQuery(expandedSpMap, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions()); - while (resultIterator.hasNext()) { - myReadPids.add(resultIterator.next()); + //Execute query and all found pids to our local iterator. + IResultIterator resultIterator = searchBuilder.createQuery(expandedSpMap, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions()); + while (resultIterator.hasNext()) { + myReadPids.add(resultIterator.next()); + } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java index 699b426d28d..c5d2ff5c5de 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java @@ -71,27 +71,28 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea List myReadPids = new ArrayList<>(); //use _typeFilter and _since and all those fancy bits and bobs to generate our basic SP map. - SearchParameterMap map = createSearchParameterMapsForResourceType(); + List maps = createSearchParameterMapsForResourceType(); String patientSearchParam = getPatientSearchParamForCurrentResourceType().getName(); - //Ensure users did not monkey with the patient compartment search parameter. - validateSearchParameters(map); + for (SearchParameterMap map: maps) { + //Ensure users did not monkey with the patient compartment search parameter. + validateSearchParameters(map); + //Skip adding the parameter querying for patient= if we are in fact querying the patient resource type. + if (!myResourceType.equalsIgnoreCase("Patient")) { + //Now that we have our basic built Bulk Export SP map, we inject the condition that the resources returned + //must have a patient= or subject= reference set. + map.add(patientSearchParam, new ReferenceParam().setMissing(false)); + } - //Skip adding the parameter querying for patient= if we are in fact querying the patient resource type. - if (!myResourceType.equalsIgnoreCase("Patient")) { - //Now that we have our basic built Bulk Export SP map, we inject the condition that the resources returned - //must have a patient= or subject= reference set. 
- map.add(patientSearchParam, new ReferenceParam().setMissing(false)); + ourLog.debug("About to execute query {}", map.toNormalizedQueryString(myContext)); + ISearchBuilder sb = getSearchBuilderForLocalResourceType(); + IResultIterator myResultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions()); + + while (myResultIterator.hasNext()) { + myReadPids.add(myResultIterator.next()); + } } - - ISearchBuilder sb = getSearchBuilderForLocalResourceType(); - IResultIterator myResultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions()); - - while (myResultIterator.hasNext()) { - myReadPids.add(myResultIterator.next()); - } - return myReadPids.iterator(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java index 8bcb29a56f2..610c123527c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java @@ -389,7 +389,6 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { public void validateTypeFilters(Set theTheFilters, Set theResourceTypes) { if (theTheFilters != null) { - Set types = new HashSet<>(); for (String next : theTheFilters) { if (!next.contains("?")) { throw new InvalidRequestException("Invalid " + JpaConstants.PARAM_EXPORT_TYPE_FILTER + " value \"" + next + "\". Must be in the form [ResourceType]?[params]"); @@ -398,9 +397,6 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { if (!theResourceTypes.contains(resourceType)) { throw new InvalidRequestException("Invalid " + JpaConstants.PARAM_EXPORT_TYPE_FILTER + " value \"" + next + "\". Resource type does not appear in " + JpaConstants.PARAM_EXPORT_TYPE+ " list"); } - if (!types.add(resourceType)) { - throw new InvalidRequestException("Invalid " + JpaConstants.PARAM_EXPORT_TYPE_FILTER + " value \"" + next + "\". 
Multiple filters found for type " + resourceType); - } } } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 9914c83d0b0..377bec8ba97 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -337,7 +337,6 @@ public class BulkDataExportProviderTest { @Test public void testInitiateWithGetAndMultipleTypeFilters() throws IOException { - //TODO GGG FIX ME IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID); when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); @@ -385,9 +384,8 @@ public class BulkDataExportProviderTest { Parameters input = new Parameters(); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); - input.addParameter(JpaConstants.PARAM_EXPORT_STYLE, new StringType("Patient, Practitioner")); - input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); - input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Patient?identifier=foo")); + input.addParameter(JpaConstants.PARAM_EXPORT_STYLE, new StringType("Patient")); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Patient?gender=male,Patient?gender=female")); ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); @@ -406,9 +404,8 @@ public class BulkDataExportProviderTest { verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); - assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); - assertThat(options.getSince(), notNullValue()); - assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo")); + assertThat(options.getResourceTypes(), containsInAnyOrder("Patient")); + assertThat(options.getFilters(), containsInAnyOrder("Patient?gender=male", "Patient?gender=female")); } @Test @@ -447,5 +444,4 @@ public class BulkDataExportProviderTest { assertThat(options.getSince(), notNullValue()); assertThat(options.getFilters(), containsInAnyOrder("Immunization?vaccine-code=foo")); } - } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index eabb341f602..be18ce383a8 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -39,6 +39,7 @@ import org.hl7.fhir.r4.model.InstantType; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.r4.model.Reference; +import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -613,10 +614,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); // Iterate over the files - Binary nextBinary = myBinaryDao.read(jobInfo.getFiles().get(0).getResourceId()); - assertEquals(Constants.CT_FHIR_NDJSON, 
nextBinary.getContentType()); - String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8); - ourLog.info("Next contents for type {}:\n{}", nextBinary.getResourceType(), nextContents); + String nextContents = getBinaryContents(jobInfo, 0); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); assertThat(nextContents, is(containsString("IMM0"))); @@ -652,10 +650,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); // Iterate over the files - Binary nextBinary = myBinaryDao.read(jobInfo.getFiles().get(0).getResourceId()); - assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType()); - String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8); - ourLog.info("Next contents for type {}:\n{}", nextBinary.getResourceType(), nextContents); + String nextContents = getBinaryContents(jobInfo, 0); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); assertThat(nextContents, is(containsString("IMM0"))); @@ -695,16 +690,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Create a bulk job IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); -// GroupBulkExportJobParametersBuilder paramBuilder = new GroupBulkExportJobParametersBuilder(); -// paramBuilder.setGroupId(myPatientGroupId.getIdPart()); -// paramBuilder.setJobUUID(jobDetails.getJobId()); -// paramBuilder.setReadChunkSize(10L); -// -// JobExecution jobExecution = myBatchJobSubmitter.runJob(myGroupBulkJob, paramBuilder.toJobParameters()); myBulkDataExportSvc.buildExportFiles(); - awaitAllBulkJobCompletions(); -// awaitJobCompletion(jobExecution); + IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); @@ -712,10 +700,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("CareTeam"))); // Iterate over the files - Binary nextBinary = myBinaryDao.read(jobInfo.getFiles().get(0).getResourceId()); - assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType()); - String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8); - ourLog.info("Next contents for type {}:\n{}", nextBinary.getResourceType(), nextContents); + String nextContents = getBinaryContents(jobInfo, 0); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("CareTeam"))); assertThat(nextContents, is(containsString("CT0"))); @@ -731,12 +716,93 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { JobParametersBuilder paramBuilder = new JobParametersBuilder().addString("jobUUID", "I'm not real!"); try { myBatchJobSubmitter.runJob(myBulkJob, paramBuilder.toJobParameters()); - fail("Should have had invalid parameter execption!"); + fail("Should have had invalid parameter exception!"); } catch (JobParametersInvalidException e) { // good } } + @Test + public void testSystemExportWithMultipleTypeFilters() { + createResources(); + + // Create a bulk job + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setResourceTypes(Sets.newHashSet("Immunization")); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + options.setFilters(Sets.newHashSet("Immunization?vaccine-code=Flu", "Immunization?patient=Patient/PAT1")); + + 
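[Editor's note] The new tests all drive the export synchronously: submit the job, call buildExportFiles(), then wait for every bulk-export batch execution to finish before asserting on the produced Binary files. A rough sketch of that wait step, assuming an injected Spring Batch JobExplorer and the Awaitility helper already used in this class; the method and class names are hypothetical.

import static org.awaitility.Awaitility.await;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.explore.JobExplorer;

public class BulkJobPollingSketch {
    // Polls until every execution of the named bulk export jobs has finished (sketch only).
    static void awaitCompletion(JobExplorer jobExplorer, String... jobNames) {
        await().atMost(120, TimeUnit.SECONDS).until(() -> {
            List<JobInstance> instances = new ArrayList<>();
            for (String name : jobNames) {
                instances.addAll(jobExplorer.findJobInstancesByJobName(name, 0, 100));
            }
            return instances.stream()
                .flatMap(instance -> jobExplorer.getJobExecutions(instance).stream())
                .map(JobExecution::getStatus)
                .allMatch(status -> status == BatchStatus.COMPLETED);
        });
    }
}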
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options); + myBulkDataExportSvc.buildExportFiles(); + awaitAllBulkJobCompletions(); + + + IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); + + assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getFiles().size(), equalTo(1)); + assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); + + // Iterate over the files + String nextContents = getBinaryContents(jobInfo, 0); + + assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); + //These are the COVID-19 entries + assertThat(nextContents, is(containsString("IMM0"))); + assertThat(nextContents, is(containsString("IMM2"))); + assertThat(nextContents, is(containsString("IMM4"))); + assertThat(nextContents, is(containsString("IMM6"))); + assertThat(nextContents, is(containsString("IMM8"))); + + //This is the entry for the one referencing patient/1 + assertThat(nextContents, is(containsString("IMM1"))); + } + + @Test + public void testGroupExportWithMultipleTypeFilters() { + createResources(); + + // Create a bulk job + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setResourceTypes(Sets.newHashSet("Observation")); + options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); + options.setGroupId(myPatientGroupId); + options.setExpandMdm(false); + options.setFilters(Sets.newHashSet("Observation?identifier=VAL0,VAL2", "Observation?identifer=VAL4")); + + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options); + myBulkDataExportSvc.buildExportFiles(); + awaitAllBulkJobCompletions(); + + IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); + + assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); + assertThat(jobInfo.getFiles().size(), equalTo(1)); + assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Observation"))); + String nextContents = getBinaryContents(jobInfo, 0); + + assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); + //These are the COVID-19 entries + assertThat(nextContents, is(containsString("IMM0"))); + assertThat(nextContents, is(containsString("IMM2"))); + assertThat(nextContents, is(containsString("IMM4"))); + assertThat(nextContents, is(containsString("IMM6"))); + assertThat(nextContents, is(containsString("IMM8"))); + + //This is the entry for the one referencing patient/1 + assertThat(nextContents, is(containsString("IMM1"))); + } + + @NotNull + public String getBinaryContents(IBulkDataExportSvc.JobInfo theJobInfo, int theIndex) { + // Iterate over the files + Binary nextBinary = myBinaryDao.read(theJobInfo.getFiles().get(theIndex).getResourceId()); + assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType()); + String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8); + ourLog.info("Next contents for type {}:\n{}", nextBinary.getResourceType(), nextContents); + return nextContents; + } + @Test public void testMdmExpansionSuccessfullyExtractsPatients() throws JobParametersInvalidException { @@ -761,10 +827,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Patient"))); - Binary patientExportContent = 
myBinaryDao.read(jobInfo.getFiles().get(0).getResourceId()); - assertEquals(Constants.CT_FHIR_NDJSON, patientExportContent.getContentType()); - String nextContents = new String(patientExportContent.getContent(), Constants.CHARSET_UTF8); - ourLog.info("Next contents for type {}:\n{}", patientExportContent.getResourceType(), nextContents); + String nextContents = getBinaryContents(jobInfo, 0); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Patient"))); //Output contains The entire group, plus the Mdm expansion, plus the golden resource @@ -797,10 +860,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); // Check immunization Content - Binary immunizationExportContent = myBinaryDao.read(jobInfo.getFiles().get(0).getResourceId()); - assertEquals(Constants.CT_FHIR_NDJSON, immunizationExportContent.getContentType()); - String nextContents = new String(immunizationExportContent.getContent(), Constants.CHARSET_UTF8); - ourLog.info("Next contents for type {}:\n{}", immunizationExportContent.getResourceType(), nextContents); + String nextContents = getBinaryContents(jobInfo, 0); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); assertThat(nextContents, is(containsString("IMM0"))); assertThat(nextContents, is(containsString("IMM2"))); @@ -865,12 +925,8 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); // Check immunization Content - Binary immunizationExportContent = myBinaryDao.read(jobInfo.getFiles().get(0).getResourceId()); - assertEquals(Constants.CT_FHIR_NDJSON, immunizationExportContent.getContentType()); - String nextContents = new String(immunizationExportContent.getContent(), Constants.CHARSET_UTF8); - ourLog.info("Next contents for type {}:\n{}", immunizationExportContent.getResourceType(), nextContents); + String nextContents = getBinaryContents(jobInfo, 0); - assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); assertThat(nextContents, is(containsString("IMM1"))); assertThat(nextContents, is(containsString("IMM3"))); assertThat(nextContents, is(containsString("IMM5"))); From 30a48894cc22a076cb414a62305d0a1c09a413fc Mon Sep 17 00:00:00 2001 From: Tadgh Date: Tue, 9 Mar 2021 22:54:51 -0500 Subject: [PATCH 09/22] Add another test --- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index be18ce383a8..26d31c3e11f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -39,7 +39,6 @@ import org.hl7.fhir.r4.model.InstantType; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.r4.model.Reference; -import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -793,7 +792,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(nextContents, is(containsString("IMM1"))); } - @NotNull public String getBinaryContents(IBulkDataExportSvc.JobInfo 
theJobInfo, int theIndex) { // Iterate over the files Binary nextBinary = myBinaryDao.read(theJobInfo.getFiles().get(theIndex).getResourceId()); @@ -937,6 +935,20 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(nextContents, is(not(containsString("Flu")))); } + @Test + public void testPatientExportWithNoTypesWorks() { + // Create a bulk job + BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); + bulkDataExportOptions.setOutputFormat(null); + bulkDataExportOptions.setResourceTypes(Sets.newHashSet(myFhirCtx.getResourceTypes()); + bulkDataExportOptions.setSince(null); + bulkDataExportOptions.setFilters(null); + bulkDataExportOptions.setGroupId(myPatientGroupId); + bulkDataExportOptions.setExpandMdm(true); + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + } + private void awaitJobCompletion(JobExecution theJobExecution) { await().atMost(120, TimeUnit.SECONDS).until(() -> { JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId()); @@ -960,8 +972,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createImmunizationWithIndex(999, g1Outcome.getId()); createCareTeamWithIndex(999, g1Outcome.getId()); - //Lets create an observation and an immunization for our golden patient. - for (int i = 0; i < 10; i++) { DaoMethodOutcome patientOutcome = createPatientWithIndex(i); IIdType patId = patientOutcome.getId().toUnqualifiedVersionless(); From bfb9d0bc36bece6166617e08a8232ba511f3b739 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 10:30:54 -0500 Subject: [PATCH 10/22] Filter default resource types based on export style --- .../fhir/jpa/bulk/api/IBulkDataExportSvc.java | 6 +++ .../fhir/jpa/bulk/job/BaseBulkItemReader.java | 2 +- .../jpa/bulk/job/PatientBulkItemReader.java | 3 +- .../bulk/provider/BulkDataExportProvider.java | 1 + .../jpa/bulk/svc/BulkDataExportSvcImpl.java | 51 +++++++++++++++++-- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 29 ++++++++++- 6 files changed, 82 insertions(+), 10 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java index 3784fc8a18d..16aa22858d7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java @@ -27,6 +27,7 @@ import javax.transaction.Transactional; import java.util.ArrayList; import java.util.Date; import java.util.List; +import java.util.Set; public interface IBulkDataExportSvc { void buildExportFiles(); @@ -38,6 +39,11 @@ public interface IBulkDataExportSvc { JobInfo getJobInfoOrThrowResourceNotFound(String theJobId); + /** + * Return a set of all resource types which contain search parameters which have Patient as a target. 
+ */ + Set getPatientCompartmentResources(); + void cancelAndPurgeAllJobs(); class JobInfo { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java index 278095ee393..3f2cd98445a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java @@ -186,7 +186,7 @@ public abstract class BaseBulkItemReader implements ItemReader searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient"); if (searchParams == null || searchParams.size() == 0) { - String errorMessage = String.format("Resource type [%s] is not eligible for Group Bulk export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", myResourceType); + String errorMessage = String.format("Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", myResourceType); throw new IllegalArgumentException(errorMessage); } else if (searchParams.size() == 1) { patientSearchParam = searchParams.get(0); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java index c5d2ff5c5de..219b4f17368 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java @@ -55,7 +55,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea private RuntimeSearchParam validateSearchParameters(SearchParameterMap expandedSpMap) { RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType(); if (expandedSpMap.get(runtimeSearchParam.getName()) != null) { - throw new IllegalArgumentException(String.format("Group Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName())); + throw new IllegalArgumentException(String.format("Patient Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName())); } return runtimeSearchParam; } @@ -74,7 +74,6 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea List maps = createSearchParameterMapsForResourceType(); String patientSearchParam = getPatientSearchParamForCurrentResourceType().getName(); - for (SearchParameterMap map: maps) { //Ensure users did not monkey with the patient compartment search parameter. 
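[Editor's note] The effect of the loop below is that, in a patient-level export, every resource type other than Patient itself gets an extra "<patient-param>:missing=false" clause appended to each generated query, so only resources that actually carry a patient or subject reference are selected. A reduced restatement of that rule, with plain maps standing in for SearchParameterMap; all names are illustrative.

import java.util.LinkedHashMap;
import java.util.Map;

public class PatientCompartmentQuerySketch {
    // For any type except Patient, add a "<patient-param>:missing=false" clause so the
    // query only returns resources that reference a patient or subject.
    static Map<String, String> withPatientClause(String resourceType, String patientSearchParam, Map<String, String> filterParams) {
        Map<String, String> effective = new LinkedHashMap<>(filterParams);
        if (!"Patient".equalsIgnoreCase(resourceType)) {
            effective.put(patientSearchParam + ":missing", "false");
        }
        return effective;
    }
}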
validateSearchParameters(map); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index 5ff7be95bb8..4fde8b3c2f6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -126,6 +126,7 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, ServletRequestDetails theRequestDetails ) { + myBulkDataExportSvc.getPatientCompartmentResources(); validatePreferAsyncHeader(theRequestDetails); BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter); IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java index 610c123527c..6da24977af3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java @@ -21,6 +21,9 @@ package ca.uhn.fhir.jpa.bulk.svc; */ import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.fhirpath.IFhirPath; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; @@ -40,12 +43,16 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.util.UrlUtil; +import com.google.common.collect.Sets; import org.apache.commons.lang3.time.DateUtils; +import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseBinary; +import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.InstantType; import org.quartz.JobExecutionContext; @@ -64,13 +71,16 @@ import org.springframework.transaction.support.TransactionTemplate; import javax.annotation.PostConstruct; import javax.transaction.Transactional; import java.util.Date; -import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; +import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.GROUP; +import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.PATIENT; +import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.SYSTEM; import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam; import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -112,6 +122,8 @@ public class BulkDataExportSvcImpl implements 
IBulkDataExportSvc { @Qualifier(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME) private org.springframework.batch.core.Job myPatientBulkExportJob; + private Set myCompartmentResources; + private final int myRetentionPeriod = (int) (2 * DateUtils.MILLIS_PER_HOUR); /** @@ -291,9 +303,9 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { requestBuilder.append("/"); //Prefix the export url with Group/[id]/ or /Patient/ depending on what type of request it is. - if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.GROUP)) { + if (theBulkDataExportOptions.getExportStyle().equals(GROUP)) { requestBuilder.append(theBulkDataExportOptions.getGroupId().toVersionless()).append("/"); - } else if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.PATIENT)) { + } else if (theBulkDataExportOptions.getExportStyle().equals(PATIENT)) { requestBuilder.append("Patient/"); } @@ -313,7 +325,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { .forEach(filter -> requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(escapeUrlParam(filter))); } - if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.GROUP)) { + if (theBulkDataExportOptions.getExportStyle().equals(GROUP)) { requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_GROUP_ID).append("=").append(theBulkDataExportOptions.getGroupId().getValue()); requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_MDM).append("=").append(theBulkDataExportOptions.isExpandMdm()); } @@ -338,7 +350,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { // This is probably not a useful default, but having the default be "download the whole // server" seems like a risky default too. 
We'll deal with that by having the default involve // only returning a small time span - resourceTypes = myContext.getResourceTypes(); + resourceTypes = getAllowedResourceTypesForBulkExportStyle(theBulkDataExportOptions.getExportStyle()); if (since == null) { since = DateUtils.addDays(new Date(), -1); } @@ -437,6 +449,35 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { return retVal; } + @Override + public Set getPatientCompartmentResources() { + if (myCompartmentResources == null) { + myCompartmentResources = myContext.getResourceTypes().stream() + .filter(this::resourceTypeIsInPatientCompartment) + .collect(Collectors.toSet()); + } + return myCompartmentResources; + } + + /** + * Return true if any search parameter in the resource can point at a patient, false otherwise + */ + private boolean resourceTypeIsInPatientCompartment(String theResourceType) { + RuntimeResourceDefinition runtimeResourceDefinition = myContext.getResourceDefinition(theResourceType); + List searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient"); + return searchParams != null && searchParams.size() >= 1; + } + + public Set getAllowedResourceTypesForBulkExportStyle(BulkDataExportOptions.ExportStyle theExportStyle) { + if (theExportStyle.equals(SYSTEM)) { + return myContext.getResourceTypes(); + } else if (theExportStyle.equals(GROUP) || theExportStyle.equals(PATIENT)) { + return getPatientCompartmentResources(); + } else { + return null; + } + } + private IIdType toId(String theResourceId) { IIdType retVal = myContext.getVersion().newIdType(); retVal.setValue(theResourceId); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 26d31c3e11f..eefc4e9afd6 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -936,17 +936,42 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } @Test - public void testPatientExportWithNoTypesWorks() { + public void testAllExportStylesWorkWithNullResourceTypes() { + createResources(); + myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED); // Create a bulk job BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); bulkDataExportOptions.setOutputFormat(null); - bulkDataExportOptions.setResourceTypes(Sets.newHashSet(myFhirCtx.getResourceTypes()); + bulkDataExportOptions.setResourceTypes(null); bulkDataExportOptions.setSince(null); bulkDataExportOptions.setFilters(null); bulkDataExportOptions.setGroupId(myPatientGroupId); bulkDataExportOptions.setExpandMdm(true); bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT); + + //Patient-style IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + myBulkDataExportSvc.buildExportFiles(); + awaitAllBulkJobCompletions(); + IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); + assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); + + //Group-style + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); + bulkDataExportOptions.setGroupId(myPatientGroupId); + jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + myBulkDataExportSvc.buildExportFiles(); + 
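[Editor's note] The default-type behaviour exercised by this test comes from getAllowedResourceTypesForBulkExportStyle() above: with no _type supplied, system exports fall back to every resource type, while group and patient exports fall back to the Patient-compartment types. The compartment check itself reduces to the following standalone restatement against a FhirContext; it mirrors resourceTypeIsInPatientCompartment() rather than defining new behaviour.

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;

public class PatientCompartmentResourceTypes {
    // Every resource type declaring at least one Patient-compartment search parameter;
    // group- and patient-style exports fall back to this set when no _type is supplied.
    public static Set<String> patientCompartmentTypes(FhirContext context) {
        return context.getResourceTypes().stream()
            .filter(type -> {
                List<RuntimeSearchParam> params =
                    context.getResourceDefinition(type).getSearchParamsForCompartmentName("Patient");
                return params != null && !params.isEmpty();
            })
            .collect(Collectors.toSet());
    }
}

Run against an R4 context (FhirContext.forR4()) this yields types with patient or subject search parameters, which matches the resource types the group and patient tests in this class exercise.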
awaitAllBulkJobCompletions(); + jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); + assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); + + //System-style + bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + myBulkDataExportSvc.buildExportFiles(); + awaitAllBulkJobCompletions(); + jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); + assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); } private void awaitJobCompletion(JobExecution theJobExecution) { From 3e249d0261090c20f4799500cb518d5fcfbda3f0 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 12:03:19 -0500 Subject: [PATCH 11/22] Minor cleanup --- .../uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java | 3 +-- .../jpa/bulk/provider/BulkDataExportProvider.java | 2 -- .../uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java | 11 +++++------ 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java index 219b4f17368..c206404ac95 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java @@ -77,10 +77,9 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea for (SearchParameterMap map: maps) { //Ensure users did not monkey with the patient compartment search parameter. validateSearchParameters(map); + //Skip adding the parameter querying for patient= if we are in fact querying the patient resource type. if (!myResourceType.equalsIgnoreCase("Patient")) { - //Now that we have our basic built Bulk Export SP map, we inject the condition that the resources returned - //must have a patient= or subject= reference set. 
map.add(patientSearchParam, new ReferenceParam().setMissing(false)); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index 4fde8b3c2f6..034190ef837 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -60,8 +60,6 @@ public class BulkDataExportProvider { private IBulkDataExportSvc myBulkDataExportSvc; @Autowired private FhirContext myFhirContext; - @Autowired - private DaoConfig myDaoConfig; @VisibleForTesting public void setFhirContextForUnitTest(FhirContext theFhirContext) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java index 6da24977af3..cfda28fa1d0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java @@ -254,6 +254,10 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { return theBulkExportJobEntity.getRequest().startsWith("/Patient/"); } + private boolean isGroupBulkJob(BulkExportJobEntity theBulkExportJobEntity) { + return theBulkExportJobEntity.getRequest().startsWith("/Group/"); + } + private void enhanceBulkParametersWithGroupParameters(BulkExportJobEntity theBulkExportJobEntity, JobParametersBuilder theParameters) { String theGroupId = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_GROUP_ID); String expandMdm = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_MDM); @@ -261,10 +265,6 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { theParameters.addString(BulkExportJobConfig.EXPAND_MDM_PARAMETER, expandMdm); } - private boolean isGroupBulkJob(BulkExportJobEntity theBulkExportJobEntity) { - String queryParameterIfPresent = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_GROUP_ID); - return queryParameterIfPresent != null; - } @SuppressWarnings("unchecked") private IFhirResourceDao getBinaryDao() { @@ -309,7 +309,6 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { requestBuilder.append("Patient/"); } - requestBuilder.append(JpaConstants.OPERATION_EXPORT); requestBuilder.append("?").append(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT).append("=").append(escapeUrlParam(outputFormat)); Set resourceTypes = theBulkDataExportOptions.getResourceTypes(); @@ -474,7 +473,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { } else if (theExportStyle.equals(GROUP) || theExportStyle.equals(PATIENT)) { return getPatientCompartmentResources(); } else { - return null; + return throw new IllegalArgumentException(String.format("HAPI FHIR does not recognize a Bulk Export request of type: %s", theExportStyle)); } } From 374a65db7d77056320ef34782e398c4288453bff Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 12:03:52 -0500 Subject: [PATCH 12/22] Fix bug --- .../java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java index cfda28fa1d0..bad1ff33a5c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java @@ -473,7 +473,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { } else if (theExportStyle.equals(GROUP) || theExportStyle.equals(PATIENT)) { return getPatientCompartmentResources(); } else { - return throw new IllegalArgumentException(String.format("HAPI FHIR does not recognize a Bulk Export request of type: %s", theExportStyle)); + throw new IllegalArgumentException(String.format("HAPI FHIR does not recognize a Bulk Export request of type: %s", theExportStyle)); } } From 5323486d282c5ccc50daa576d53a3ba21059c338 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 12:48:54 -0500 Subject: [PATCH 13/22] Validate types --- .../bulk/provider/BulkDataExportProvider.java | 13 +++++++++++ .../jpa/bulk/BulkDataExportProviderTest.java | 22 ++++++++++++++++++- 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index 034190ef837..62260be1029 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -50,7 +50,9 @@ import java.io.IOException; import java.util.Arrays; import java.util.Date; import java.util.HashSet; +import java.util.List; import java.util.Set; +import java.util.stream.Collectors; public class BulkDataExportProvider { @@ -109,10 +111,21 @@ public class BulkDataExportProvider { ) { validatePreferAsyncHeader(theRequestDetails); BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm); + validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes()); IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions); writePollingLocationToResponseHeaders(theRequestDetails, outcome); } + private void validateResourceTypesAllContainPatientSearchParams(Set theResourceTypes) { + List badResourceTypes = theResourceTypes.stream() + .filter(resourceType -> !myBulkDataExportSvc.getPatientCompartmentResources().contains(resourceType)) + .collect(Collectors.toList()); + + if (!badResourceTypes.isEmpty()) { + throw new IllegalArgumentException(String.format("Resource types [] are invalid for this type of export, as they do not contain search parameters that refer to patients.", String.join(",", badResourceTypes))); + } + } + /** * Patient/$export */ diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 377bec8ba97..40bed8c795f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -373,6 +373,26 @@ public class BulkDataExportProviderTest { assertThat(options.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2,
observationFilter1)); } + + @Test + public void testInitiateGroupExportWithInvalidResourceTypesFails() throws IOException { + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() + .setJobId(A_JOB_ID); + when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + + String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("StructureDefinition"); + + HttpGet get = new HttpGet(url); + get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + myClient.execute(get); + + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); + + } + @Test public void testInitiateWithPostAndMultipleTypeFilters() throws IOException { @@ -419,7 +439,7 @@ public class BulkDataExportProviderTest { Parameters input = new Parameters(); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); - input.addParameter(JpaConstants.PARAM_EXPORT_STYLE, new StringType("Immunization, Observation")); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Immunization, Observation")); input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Immunization?vaccine-code=foo")); From 420c4f83785df1163f20a5d3ac537fcdf0fdb8f1 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 13:34:04 -0500 Subject: [PATCH 14/22] Rework pid translator to accept non-forced-ids --- .../uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java | 14 +++++++++----- .../jpa/bulk/provider/BulkDataExportProvider.java | 13 +++++++++++-- .../fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java | 1 + 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java index 1b3e22ce988..dd03381fa83 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/GroupBulkItemReader.java @@ -76,8 +76,6 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade @Autowired private IMdmLinkDao myMdmLinkDao; - private RuntimeSearchParam myPatientSearchParam; - @Override Iterator getResourcePidIterator() { Set myReadPids = new HashSet<>(); @@ -112,7 +110,6 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade */ private Iterator getExpandedPatientIterator() { Set patientPidsToExport = new HashSet<>(); - //This gets all member pids List members = getMembers(); List ids = members.stream().map(member -> new IdDt("Patient/" + member)).collect(Collectors.toList()); List pidsOrThrowException = myIdHelperService.getPidsOrThrowException(ids); @@ -161,8 +158,15 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade //Now lets translate these pids into resource IDs Set uniquePids = new HashSet<>(); goldenPidTargetPidTuple.forEach(uniquePids::addAll); - Map> longOptionalMap = myIdHelperService.translatePidsToForcedIds(uniquePids); - expandedIds = longOptionalMap.values().stream().map(Optional::get).collect(Collectors.toSet()); + + Map> 
pidToForcedIdMap = myIdHelperService.translatePidsToForcedIds(uniquePids); + + //If the result of the translation is an empty optional, it means there is no forced id, and we can use the PID as the resource ID. + Set resolvedResourceIds = pidToForcedIdMap.entrySet().stream() + .map(entry -> entry.getValue().isPresent() ? entry.getValue().get() : entry.getKey().toString()) + .collect(Collectors.toSet()); + + expandedIds.addAll(resolvedResourceIds); } //Now manually add the members of the group (it's possible even with mdm expansion that some members don't have MDM matches, diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index 62260be1029..a57c95d2160 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -43,6 +43,7 @@ import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.InstantType; +import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import javax.servlet.http.HttpServletResponse; @@ -54,9 +55,12 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import static org.slf4j.LoggerFactory.getLogger; + public class BulkDataExportProvider { public static final String FARM_TO_TABLE_TYPE_FILTER_REGEX = "(?:,)(?=[A-Z][a-z]+\\?)"; + private static final Logger ourLog = getLogger(BulkDataExportProvider.class); @Autowired private IBulkDataExportSvc myBulkDataExportSvc; @@ -90,8 +94,6 @@ public class BulkDataExportProvider { writePollingLocationToResponseHeaders(theRequestDetails, outcome); } - - private String getServerBase(ServletRequestDetails theRequestDetails) { return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/"); } @@ -109,6 +111,13 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") IPrimitiveType theMdm, ServletRequestDetails theRequestDetails ) { + ourLog.debug("Received Group Bulk Export Request for Group {}", theIdParam); + ourLog.debug("_type={}", theType); + ourLog.debug("_since={}", theSince); + ourLog.debug("_typeFilter={}", theTypeFilter); + ourLog.debug("_mdm={}", theMdm); + + validatePreferAsyncHeader(theRequestDetails); BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm); validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index eefc4e9afd6..c6fdf13ac53 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -1015,6 +1015,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createImmunizationWithIndex(i, patId); createCareTeamWithIndex(i, patId); } + myPatientGroupId = myGroupDao.update(group).getId(); //Manually create another golden record From 
827ee334143d9c717192a5c18349825eafc9276c Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 13:52:15 -0500 Subject: [PATCH 15/22] Add test for provider type validation pre-submission --- .../bulk/provider/BulkDataExportProvider.java | 9 +++--- .../jpa/bulk/BulkDataExportProviderTest.java | 29 +++++++++---------- .../uhn/fhir/jpa/model/util/JpaConstants.java | 5 ---- 3 files changed, 17 insertions(+), 26 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index a57c95d2160..83bdb9c8a3a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.bulk.provider; */ import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson; @@ -83,7 +82,7 @@ public class BulkDataExportProvider { @Operation(name = JpaConstants.OPERATION_EXPORT, global = false /* set to true once we can handle this */, manualResponse = true, idempotent = true) public void export( @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, - @OperationParam(name = JpaConstants.PARAM_EXPORT_STYLE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, ServletRequestDetails theRequestDetails @@ -105,7 +104,7 @@ public class BulkDataExportProvider { public void groupExport( @IdParam IIdType theIdParam, @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, - @OperationParam(name = JpaConstants.PARAM_EXPORT_STYLE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, @OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") IPrimitiveType theMdm, @@ -131,7 +130,7 @@ public class BulkDataExportProvider { .collect(Collectors.toList()); if (!badResourceTypes.isEmpty()) { - throw new IllegalArgumentException(String.format("Resource types [] are invalid for this type of export, as they do not contain search parameters that refer to patients.", String.join(",", badResourceTypes))); + throw new InvalidRequestException(String.format("Resource types [%s] are invalid for this type of export, as they do not contain search parameters that refer to patients.", String.join(",", badResourceTypes))); } } @@ -141,7 +140,7 @@ public class BulkDataExportProvider { 
@Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Patient") public void patientExport( @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, - @OperationParam(name = JpaConstants.PARAM_EXPORT_STYLE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, ServletRequestDetails theRequestDetails diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 40bed8c795f..481c35826de 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -16,6 +16,7 @@ import ca.uhn.fhir.test.utilities.JettyUtil; import ca.uhn.fhir.util.JsonUtil; import ca.uhn.fhir.util.UrlUtil; import com.google.common.base.Charsets; +import com.google.common.collect.Sets; import org.apache.commons.io.IOUtils; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; @@ -118,7 +119,7 @@ public class BulkDataExportProviderTest { Parameters input = new Parameters(); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); - input.addParameter(JpaConstants.PARAM_EXPORT_STYLE, new StringType("Patient, Practitioner")); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner")); input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Patient?identifier=foo")); @@ -155,7 +156,7 @@ public class BulkDataExportProviderTest { String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + "?" 
+ JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) - + "&" + JpaConstants.PARAM_EXPORT_STYLE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner") + + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner") + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()) + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?identifier=foo"); @@ -307,7 +308,7 @@ public class BulkDataExportProviderTest { Parameters input = new Parameters(); StringType obsTypeFilter = new StringType("Observation?code=OBSCODE,DiagnosticReport?code=DRCODE"); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); - input.addParameter(JpaConstants.PARAM_EXPORT_STYLE, new StringType("Observation, DiagnosticReport")); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Observation, DiagnosticReport")); input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); input.addParameter(JpaConstants.PARAM_EXPORT_MDM, true); input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, obsTypeFilter); @@ -345,7 +346,7 @@ public class BulkDataExportProviderTest { String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) - + "&" + JpaConstants.PARAM_EXPORT_STYLE + "=" + UrlUtil.escapeUrlParam("Immunization, Observation") + + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Immunization, Observation") + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()); String immunizationTypeFilter1 = "Immunization?patient.identifier=SC378274-MRN|009999997,SC378274-MRN|009999998,SC378274-MRN|009999999&date=2020-01-02"; @@ -376,21 +377,18 @@ public class BulkDataExportProviderTest { @Test public void testInitiateGroupExportWithInvalidResourceTypesFails() throws IOException { - IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() - .setJobId(A_JOB_ID); - when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); - - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + when (myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Observation")); + String url = "http://localhost:" + myPort + "/" + "Group/123/" +JpaConstants.OPERATION_EXPORT + "?" 
+ JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) - + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("StructureDefinition"); + + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("StructureDefinition,Observation"); HttpGet get = new HttpGet(url); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); - myClient.execute(get); - - verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); - BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); + CloseableHttpResponse execute = myClient.execute(get); + String responseBody = IOUtils.toString(execute.getEntity().getContent()); + assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(400))); + assertThat(responseBody, is(containsString("Resource types [StructureDefinition] are invalid for this type of export, as they do not contain search parameters that refer to patients."))); } @Test @@ -404,7 +402,7 @@ public class BulkDataExportProviderTest { Parameters input = new Parameters(); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); - input.addParameter(JpaConstants.PARAM_EXPORT_STYLE, new StringType("Patient")); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient")); input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Patient?gender=male,Patient?gender=female")); ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); @@ -430,7 +428,6 @@ public class BulkDataExportProviderTest { @Test public void testInitiatePatientExportRequest() throws IOException { - IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID); when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java index a61325fb42e..3cf09bb4efc 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java @@ -194,11 +194,6 @@ public class JpaConstants { */ public static final String PARAM_EXPORT_MDM = "_mdm"; - /** - * the Type of the export - */ - public static final String PARAM_EXPORT_STYLE = "_exportStyle"; - /** * Parameter for delete to indicate the deleted resources should also be expunged */ From 412cf003b02be29460923e883984a51e6025c286 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 14:52:53 -0500 Subject: [PATCH 16/22] Add test for cache busting header --- .../fhir/jpa/bulk/api/IBulkDataExportSvc.java | 2 + .../bulk/provider/BulkDataExportProvider.java | 14 +++- .../jpa/bulk/svc/BulkDataExportSvcImpl.java | 20 ++++-- .../fhir/jpa/dao/data/IBulkExportJobDao.java | 2 +- .../jpa/bulk/BulkDataExportProviderTest.java | 70 +++++++++++++++++++ .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 37 ++++++++++ 6 files changed, 136 insertions(+), 9 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java index 16aa22858d7..af39667b19b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java @@ -37,6 +37,8 @@ public interface IBulkDataExportSvc { JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions); + JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions, Boolean useCache); + JobInfo getJobInfoOrThrowResourceNotFound(String theJobId); /** diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index 83bdb9c8a3a..0c6cb15bf75 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; +import ca.uhn.fhir.rest.api.CacheControlDirective; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.PreferHeader; import ca.uhn.fhir.rest.server.RestfulServerUtils; @@ -44,6 +45,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.InstantType; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; +import sun.misc.Cache; import javax.servlet.http.HttpServletResponse; import java.io.IOException; @@ -89,10 +91,16 @@ public class BulkDataExportProvider { ) { validatePreferAsyncHeader(theRequestDetails); BulkDataExportOptions bulkDataExportOptions = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter); - IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + Boolean useCache = shouldUseCache(theRequestDetails); + IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, useCache); writePollingLocationToResponseHeaders(theRequestDetails, outcome); } + private boolean shouldUseCache(ServletRequestDetails theRequestDetails) { + CacheControlDirective cacheControlDirective = new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)); + return !cacheControlDirective.isNoCache(); + } + private String getServerBase(ServletRequestDetails theRequestDetails) { return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/"); } @@ -120,7 +128,7 @@ public class BulkDataExportProvider { validatePreferAsyncHeader(theRequestDetails); BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm); validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes()); - IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, shouldUseCache(theRequestDetails)); writePollingLocationToResponseHeaders(theRequestDetails, outcome); } @@ -148,7 +156,7 @@ public class BulkDataExportProvider { myBulkDataExportSvc.getPatientCompartmentResources(); validatePreferAsyncHeader(theRequestDetails); BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter); - IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions); + IBulkDataExportSvc.JobInfo outcome = 
myBulkDataExportSvc.submitJob(bulkDataExportOptions, shouldUseCache(theRequestDetails)); writePollingLocationToResponseHeaders(theRequestDetails, outcome); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java index bad1ff33a5c..7bec3d61a89 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java @@ -289,6 +289,12 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { @Transactional @Override public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions) { + return submitJob(theBulkDataExportOptions, true); + } + + @Transactional + @Override + public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions, Boolean useCache) { String outputFormat = Constants.CT_FHIR_NDJSON; if (isNotBlank(theBulkDataExportOptions.getOutputFormat())) { outputFormat = theBulkDataExportOptions.getOutputFormat(); @@ -333,11 +339,15 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { String request = requestBuilder.toString(); - Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis); - Pageable page = PageRequest.of(0, 10); - Slice existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR); - if (!existing.isEmpty()) { - return toSubmittedJobInfo(existing.iterator().next()); + + //If we are using the cache, then attempt to retrieve a matching job based on the Request String, otherwise just make a new one. + if (useCache) { + Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis); + Pageable page = PageRequest.of(0, 10); + Slice existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR); + if (!existing.isEmpty()) { + return toSubmittedJobInfo(existing.iterator().next()); + } } if (resourceTypes != null && resourceTypes.contains("Binary")) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java index a99dae53aac..708425d5fdb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java @@ -43,7 +43,7 @@ public interface IBulkExportJobDao extends JpaRepository findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff); - @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status") + @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status ORDER BY j.myCreated DESC") Slice findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkJobStatusEnum theNotStatus); @Modifying diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 481c35826de..2879104690a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -56,6 +56,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.times; @@ -77,6 +78,8 @@ public class BulkDataExportProviderTest { private CloseableHttpClient myClient; @Captor private ArgumentCaptor myBulkDataExportOptionsCaptor; + @Captor + private ArgumentCaptor myBooleanArgumentCaptor; @AfterEach public void after() throws Exception { @@ -336,6 +339,43 @@ public class BulkDataExportProviderTest { assertThat(options.isExpandMdm(), is(equalTo(true))); } + @Test + public void testSuccessfulInitiateGroupBulkRequest_Get() throws IOException { + + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo().setJobId(G_JOB_ID); + when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Patient", "Practitioner")); + + InstantType now = InstantType.now(); + + String url = "http://localhost:" + myPort + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT + + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner") + + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()) + + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?identifier=foo|bar") + + "&" + JpaConstants.PARAM_EXPORT_MDM+ "=true"; + + HttpGet get = new HttpGet(url); + get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + ourLog.info("Request: {}", url); + try (CloseableHttpResponse response = myClient.execute(get)) { + ourLog.info("Response: {}", response.toString()); + + assertEquals(202, response.getStatusLine().getStatusCode()); + assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); + assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + } + + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); + assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); + assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); + assertThat(options.getSince(), notNullValue()); + assertThat(options.getFilters(), notNullValue()); + assertEquals(GROUP_ID, options.getGroupId().getValue()); + assertThat(options.isExpandMdm(), is(equalTo(true))); + } + @Test public void testInitiateWithGetAndMultipleTypeFilters() throws IOException { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() @@ -461,4 +501,34 @@ public class BulkDataExportProviderTest { assertThat(options.getSince(), notNullValue()); assertThat(options.getFilters(), containsInAnyOrder("Immunization?vaccine-code=foo")); } + + @Test + public void testProviderProcessesNoCacheHeader() throws IOException { + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() + .setJobId(A_JOB_ID); + 
when(myBulkDataExportSvc.submitJob(any(), anyBoolean())).thenReturn(jobInfo); + + + Parameters input = new Parameters(); + input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner")); + + HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); + post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + post.addHeader(Constants.HEADER_CACHE_CONTROL, Constants.CACHE_CONTROL_NO_CACHE); + post.setEntity(new ResourceEntity(myCtx, input)); + ourLog.info("Request: {}", post); + try (CloseableHttpResponse response = myClient.execute(post)) { + ourLog.info("Response: {}", response.toString()); + assertEquals(202, response.getStatusLine().getStatusCode()); + assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); + assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + } + + + verify(myBulkDataExportSvc).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture()); + Boolean usedCache = myBooleanArgumentCaptor.getValue(); + assertThat(usedCache, is(equalTo(false))); + } + } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index c6fdf13ac53..5da62828243 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -61,6 +61,7 @@ import java.util.Set; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.awaitility.Awaitility.await; import static org.hamcrest.CoreMatchers.containsString; @@ -70,6 +71,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @@ -974,6 +976,41 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); } + @Test + public void testCacheSettingIsRespectedWhenCreatingNewJobs() { + BulkDataExportOptions options = new BulkDataExportOptions(); + options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + options.setResourceTypes(Sets.newHashSet("Procedure")); + IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(options, true); + IBulkDataExportSvc.JobInfo jobInfo1 = myBulkDataExportSvc.submitJob(options, true); + IBulkDataExportSvc.JobInfo jobInfo2 = myBulkDataExportSvc.submitJob(options, true); + IBulkDataExportSvc.JobInfo jobInfo3 = myBulkDataExportSvc.submitJob(options, true); + IBulkDataExportSvc.JobInfo jobInfo4 = myBulkDataExportSvc.submitJob(options, true); + + //Cached should have all identical Job IDs. 
+ String initialJobId = jobInfo.getJobId(); + boolean allMatch = Stream.of(jobInfo, jobInfo1, jobInfo2, jobInfo3, jobInfo4).allMatch(job -> job.getJobId().equals(initialJobId)); + assertTrue(allMatch); + + BulkDataExportOptions options2 = new BulkDataExportOptions(); + options2.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + options2.setResourceTypes(Sets.newHashSet("Procedure")); + IBulkDataExportSvc.JobInfo jobInfo5 = myBulkDataExportSvc.submitJob(options2, false); + IBulkDataExportSvc.JobInfo jobInfo6 = myBulkDataExportSvc.submitJob(options2, false); + IBulkDataExportSvc.JobInfo jobInfo7 = myBulkDataExportSvc.submitJob(options2, false); + IBulkDataExportSvc.JobInfo jobInfo8 = myBulkDataExportSvc.submitJob(options2, false); + IBulkDataExportSvc.JobInfo jobInfo9 = myBulkDataExportSvc.submitJob(options2, false); + + //First non-cached should retrieve new ID. + assertThat(initialJobId, is(not(equalTo(jobInfo5.getJobId())))); + + //Non-cached should all have unique IDs + List jobIds = Stream.of(jobInfo5, jobInfo6, jobInfo7, jobInfo8, jobInfo9).map(IBulkDataExportSvc.JobInfo::getJobId).collect(Collectors.toList()); + ourLog.info("ZOOP {}", String.join(", ", jobIds)); + Set uniqueJobIds = new HashSet<>(jobIds); + assertEquals(uniqueJobIds.size(), jobIds.size()); + } + private void awaitJobCompletion(JobExecution theJobExecution) { await().atMost(120, TimeUnit.SECONDS).until(() -> { JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId()); From 75f756b49f03f91c2df58833711e36ef32f746d7 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 15:00:54 -0500 Subject: [PATCH 17/22] Add changelog --- .../fhir/changelog/5_4_0/2445-support-patient-level-export.yaml | 2 +- .../ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java | 1 - .../java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml index 2e07e60402a..1350b9b452f 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2445-support-patient-level-export.yaml @@ -1,4 +1,4 @@ --- type: add issue: 2445 -title: "Support has been added for patient level Bulk export. This can be done via the `/Patient/$export` endpoint." +title: "Support has been added for patient level Bulk export. This can be done via the `/Patient/$export` endpoint. Also, support has been added for setting Cache-Control header to no-cache for Bulk Export requests." 
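For orientation, a client kick-off of the new patient-level export might look like the sketch below. The /Patient/$export endpoint, the Prefer: respond-async requirement, the Cache-Control: no-cache handling, and the 202 Accepted response with a Content-Location polling URL all come from the patches in this series; the server base URL, the chosen resource types, and the HttpClient wiring are illustrative assumptions only, not part of the change set:

// Illustrative sketch only, not part of the patch series; assumes a FHIR server at http://localhost:8080/fhir
CloseableHttpClient client = HttpClientBuilder.create().build();
HttpGet get = new HttpGet("http://localhost:8080/fhir/Patient/$export"
   + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
   + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Immunization,Observation"));
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);   // bulk export must be requested asynchronously
get.addHeader(Constants.HEADER_CACHE_CONTROL, Constants.CACHE_CONTROL_NO_CACHE); // force a new job instead of reusing a recent matching one
try (CloseableHttpResponse response = client.execute(get)) {
   // Expect 202 Accepted; poll the $export-poll-status URL from Content-Location until the job is COMPLETE
   String pollUrl = response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue();
}

Without the no-cache header, submitJob reuses a recent job with an identical request string via the findExistingJob lookup added above, which is the default behaviour.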
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index 0c6cb15bf75..f38535f6188 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -45,7 +45,6 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.InstantType; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; -import sun.misc.Cache; import javax.servlet.http.HttpServletResponse; import java.io.IOException; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 5da62828243..c8175143e69 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -1079,7 +1079,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createObservationWithIndex(index, null); createImmunizationWithIndex(index, null); } - } private DaoMethodOutcome createPatientWithIndex(int i) { From 9e8af41b8bfebed3b8de82d26e5e95f2da07b1e3 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 15:16:21 -0500 Subject: [PATCH 18/22] Add test for cache recency --- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 18 ++++++++++-------- .../ca/uhn/fhir/mdm/util/MdmResourceUtil.java | 1 + 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index c8175143e69..045988f0c79 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -992,14 +992,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { boolean allMatch = Stream.of(jobInfo, jobInfo1, jobInfo2, jobInfo3, jobInfo4).allMatch(job -> job.getJobId().equals(initialJobId)); assertTrue(allMatch); - BulkDataExportOptions options2 = new BulkDataExportOptions(); - options2.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); - options2.setResourceTypes(Sets.newHashSet("Procedure")); - IBulkDataExportSvc.JobInfo jobInfo5 = myBulkDataExportSvc.submitJob(options2, false); - IBulkDataExportSvc.JobInfo jobInfo6 = myBulkDataExportSvc.submitJob(options2, false); - IBulkDataExportSvc.JobInfo jobInfo7 = myBulkDataExportSvc.submitJob(options2, false); - IBulkDataExportSvc.JobInfo jobInfo8 = myBulkDataExportSvc.submitJob(options2, false); - IBulkDataExportSvc.JobInfo jobInfo9 = myBulkDataExportSvc.submitJob(options2, false); + IBulkDataExportSvc.JobInfo jobInfo5 = myBulkDataExportSvc.submitJob(options, false); + IBulkDataExportSvc.JobInfo jobInfo6 = myBulkDataExportSvc.submitJob(options, false); + IBulkDataExportSvc.JobInfo jobInfo7 = myBulkDataExportSvc.submitJob(options, false); + IBulkDataExportSvc.JobInfo jobInfo8 = myBulkDataExportSvc.submitJob(options, false); + IBulkDataExportSvc.JobInfo jobInfo9 = myBulkDataExportSvc.submitJob(options, false); //First non-cached should retrieve 
new ID. assertThat(initialJobId, is(not(equalTo(jobInfo5.getJobId())))); @@ -1009,6 +1006,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { ourLog.info("ZOOP {}", String.join(", ", jobIds)); Set uniqueJobIds = new HashSet<>(jobIds); assertEquals(uniqueJobIds.size(), jobIds.size()); + + //Now if we create another one and ask for the cache, we should get the most-recently-inserted entry. + IBulkDataExportSvc.JobInfo jobInfo10 = myBulkDataExportSvc.submitJob(options, true); + assertThat(jobInfo10.getJobId(), is(equalTo(jobInfo9.getJobId()))); + } private void awaitJobCompletion(JobExecution theJobExecution) { diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java index fbaf0557e51..a74665f4979 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java @@ -30,6 +30,7 @@ import java.util.Optional; public final class MdmResourceUtil { private MdmResourceUtil() { + } /** From b86eee53925592a32d245b198c57285d63e5dbd3 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 15:40:18 -0500 Subject: [PATCH 19/22] Add more validation of resource types --- .../ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index f38535f6188..dbdb57e3f82 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -152,9 +152,9 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, ServletRequestDetails theRequestDetails ) { - myBulkDataExportSvc.getPatientCompartmentResources(); validatePreferAsyncHeader(theRequestDetails); BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter); + validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes()); IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, shouldUseCache(theRequestDetails)); writePollingLocationToResponseHeaders(theRequestDetails, outcome); } From 126fc212a70d2ef903c94e009169b2d6dfe8273a Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 15:43:51 -0500 Subject: [PATCH 20/22] Reverting dumb changes --- .../src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java | 2 +- .../src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java index f8e844638f8..4d36ee8c648 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java @@ -544,7 +544,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil }); myDaoConfig.setSchedulingDisabled(true); -
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED); + myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields()); } @AfterEach diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java index a74665f4979..fbaf0557e51 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java @@ -30,7 +30,6 @@ import java.util.Optional; public final class MdmResourceUtil { private MdmResourceUtil() { - } /** From c8ed81d338a36d3fd8584dfdc1c3d624edb00975 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Wed, 10 Mar 2021 18:26:16 -0500 Subject: [PATCH 21/22] Fix up some mocks, fix empty search param maps being generated --- .../fhir/jpa/bulk/job/BaseBulkItemReader.java | 18 ++++++---- .../jpa/bulk/BulkDataExportProviderTest.java | 28 ++++++++-------- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 33 ++++--------------- 3 files changed, 34 insertions(+), 45 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java index 4b9b67ca5f5..c47506ce768 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java @@ -30,6 +30,7 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; +import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -107,17 +108,22 @@ public abstract class BaseBulkItemReader implements ItemReader requestUrl = UrlUtil.parseQueryStrings(jobEntity.getRequest()); String[] typeFilters = requestUrl.get(JpaConstants.PARAM_EXPORT_TYPE_FILTER); + List spMaps = null; if (typeFilters != null) { - List maps = Arrays.stream(typeFilters) + spMaps = Arrays.stream(typeFilters) .filter(typeFilter -> typeFilter.startsWith(myResourceType + "?")) .map(filter -> buildSearchParameterMapForTypeFilter(filter, theDef)) .collect(Collectors.toList()); - return maps; - } else { - SearchParameterMap map = new SearchParameterMap(); - enhanceSearchParameterMapWithCommonParameters(map); - return Collections.singletonList(map); } + + //None of the _typeFilters applied to the current resource type, so just make a simple one. 
+ if (spMaps == null || spMaps.isEmpty()) { + SearchParameterMap defaultMap = new SearchParameterMap(); + enhanceSearchParameterMapWithCommonParameters(defaultMap); + spMaps = Collections.singletonList(defaultMap); + } + + return spMaps; } private void enhanceSearchParameterMapWithCommonParameters(SearchParameterMap map) { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 2879104690a..f33e57d7b06 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -116,7 +116,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID); - when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo); InstantType now = InstantType.now(); @@ -140,7 +140,7 @@ public class BulkDataExportProviderTest { assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } - verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any()); BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); @@ -153,7 +153,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID); - when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.submitJob(any(),any())).thenReturn(jobInfo); InstantType now = InstantType.now(); @@ -174,7 +174,7 @@ public class BulkDataExportProviderTest { assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } - verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any()); BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); @@ -303,7 +303,8 @@ public class BulkDataExportProviderTest { public void testSuccessfulInitiateGroupBulkRequest_Post() throws IOException { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo().setJobId(G_JOB_ID); - when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.submitJob(any(),any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Observation", "DiagnosticReport")); InstantType now = InstantType.now(); @@ -329,7 +330,7 @@ public class BulkDataExportProviderTest { assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } - verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + 
verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any()); BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(options.getResourceTypes(), containsInAnyOrder("Observation", "DiagnosticReport")); @@ -343,7 +344,7 @@ public class BulkDataExportProviderTest { public void testSuccessfulInitiateGroupBulkRequest_Get() throws IOException { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo().setJobId(G_JOB_ID); - when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo); when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Patient", "Practitioner")); InstantType now = InstantType.now(); @@ -366,7 +367,7 @@ public class BulkDataExportProviderTest { assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } - verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any()); BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); @@ -408,7 +409,7 @@ public class BulkDataExportProviderTest { get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); myClient.execute(get); - verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), anyBoolean()); BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertThat(options.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1)); @@ -436,7 +437,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID); - when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo); InstantType now = InstantType.now(); @@ -459,7 +460,7 @@ public class BulkDataExportProviderTest { assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } - verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture()); BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(options.getResourceTypes(), containsInAnyOrder("Patient")); @@ -470,7 +471,8 @@ public class BulkDataExportProviderTest { public void testInitiatePatientExportRequest() throws IOException { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID); - when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Immunization", "Observation")); InstantType now = 
InstantType.now(); @@ -494,7 +496,7 @@ public class BulkDataExportProviderTest { assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } - verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture()); BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(options.getResourceTypes(), containsInAnyOrder("Immunization", "Observation")); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 045988f0c79..b06c03fa049 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -203,21 +203,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } } - @Test - public void testSubmit_MultipleTypeFiltersForSameType() { - try { - BulkDataExportOptions options = new BulkDataExportOptions(); - options.setOutputFormat(Constants.CT_FHIR_NDJSON); - options.setResourceTypes(Sets.newHashSet("Patient")); - options.setFilters(Sets.newHashSet("Patient?name=a", "Patient?active=true")); - options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); - myBulkDataExportSvc.submitJob(options); - fail(); - } catch (InvalidRequestException e) { - assertEquals("Invalid _typeFilter value \"Patient?name=a\". Multiple filters found for type Patient", e.getMessage()); - } - } - @Test public void testSubmit_TypeFilterForNonSelectedType() { try { @@ -318,6 +303,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { options.setResourceTypes(Sets.newHashSet("Patient", "Observation")); options.setFilters(Sets.newHashSet(TEST_FILTER)); options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options); assertNotNull(jobDetails.getJobId()); @@ -769,7 +755,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); options.setGroupId(myPatientGroupId); options.setExpandMdm(false); - options.setFilters(Sets.newHashSet("Observation?identifier=VAL0,VAL2", "Observation?identifer=VAL4")); + options.setFilters(Sets.newHashSet("Observation?identifier=VAL0,VAL2", "Observation?identifier=VAL4")); IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options); myBulkDataExportSvc.buildExportFiles(); @@ -782,16 +768,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Observation"))); String nextContents = getBinaryContents(jobInfo, 0); - assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); - //These are the COVID-19 entries - assertThat(nextContents, is(containsString("IMM0"))); - assertThat(nextContents, is(containsString("IMM2"))); - assertThat(nextContents, is(containsString("IMM4"))); - assertThat(nextContents, is(containsString("IMM6"))); - assertThat(nextContents, is(containsString("IMM8"))); - - //This is the entry for the one referencing patient/1 - 
assertThat(nextContents, is(containsString("IMM1"))); + //These are the Observation entries + assertThat(nextContents, is(containsString("OBS0"))); + assertThat(nextContents, is(containsString("OBS2"))); + assertThat(nextContents, is(containsString("OBS4"))); + assertEquals(3, nextContents.split("\n").length); } public String getBinaryContents(IBulkDataExportSvc.JobInfo theJobInfo, int theIndex) { From 2633cbd1415bb9a72e4d1a7feab6587709e18ffe Mon Sep 17 00:00:00 2001 From: James Agnew Date: Thu, 11 Mar 2021 14:23:02 -0500 Subject: [PATCH 22/22] Improve error message for invalid date param (#2466) * Improve error message for invalid date param * Add changelog * Test fixes --- .../fhir/rest/param/BaseParamWithPrefix.java | 4 +++ .../fhir/rest/param/DateRangeParamTest.java | 32 +++++++++++++------ ...e-error-message-for-invalid-dateparam.yaml | 5 +++ .../provider/r4/ResourceProviderR4Test.java | 26 +++++++++++++++ 4 files changed, 58 insertions(+), 9 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2466-improve-error-message-for-invalid-dateparam.yaml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParamWithPrefix.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParamWithPrefix.java index d962d6aa88d..257f5702be7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParamWithPrefix.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParamWithPrefix.java @@ -56,6 +56,10 @@ public abstract class BaseParamWithPrefix extends BaseParam offset++; } + if (offset > 0 && theString.length() == offset) { + throw new DataFormatException("Invalid date/time format: \"" + theString + "\""); + } + String prefix = theString.substring(0, offset); if (!isBlank(prefix)) { diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/DateRangeParamTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/DateRangeParamTest.java index 2c590d415db..3da72a6a4b8 100644 --- a/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/DateRangeParamTest.java +++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/DateRangeParamTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.api.QualifiedParamList; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -10,6 +11,7 @@ import java.util.ArrayList; import java.util.List; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class DateRangeParamTest { private FhirContext fhirContext; @@ -19,7 +21,9 @@ public class DateRangeParamTest { fhirContext = Mockito.mock(FhirContext.class); } - /** Can happen e.g. when the query parameter for {@code _lastUpdated} is left empty. */ + /** + * Can happen e.g. when the query parameter for {@code _lastUpdated} is left empty. + */ @Test public void testParamWithoutPrefixAndWithoutValue() { QualifiedParamList qualifiedParamList = new QualifiedParamList(1); @@ -33,7 +37,9 @@ public class DateRangeParamTest { assertTrue(dateRangeParam.isEmpty()); } - /** Can happen e.g. when the query parameter for {@code _lastUpdated} is given as {@code lt} without any value. */ + /** + * Can happen e.g. when the query parameter for {@code _lastUpdated} is given as {@code lt} without any value. 
+ */ @Test public void testUpperBoundWithPrefixWithoutValue() { QualifiedParamList qualifiedParamList = new QualifiedParamList(1); @@ -42,12 +48,17 @@ public class DateRangeParamTest { List<QualifiedParamList> params = new ArrayList<>(1); params.add(qualifiedParamList); DateRangeParam dateRangeParam = new DateRangeParam(); - dateRangeParam.setValuesAsQueryTokens(fhirContext, "_lastUpdated", params); - - assertTrue(dateRangeParam.isEmpty()); + try { + dateRangeParam.setValuesAsQueryTokens(fhirContext, "_lastUpdated", params); + fail(); + } catch (DataFormatException e) { + // good + } } - /** Can happen e.g. when the query parameter for {@code _lastUpdated} is given as {@code gt} without any value. */ + /** + * Can happen e.g. when the query parameter for {@code _lastUpdated} is given as {@code gt} without any value. + */ @Test public void testLowerBoundWithPrefixWithoutValue() { QualifiedParamList qualifiedParamList = new QualifiedParamList(1); @@ -56,8 +67,11 @@ public class DateRangeParamTest { List<QualifiedParamList> params = new ArrayList<>(1); params.add(qualifiedParamList); DateRangeParam dateRangeParam = new DateRangeParam(); - dateRangeParam.setValuesAsQueryTokens(fhirContext, "_lastUpdated", params); - - assertTrue(dateRangeParam.isEmpty()); + try { + dateRangeParam.setValuesAsQueryTokens(fhirContext, "_lastUpdated", params); + fail(); + } catch (DataFormatException e) { + // good + } } } diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2466-improve-error-message-for-invalid-dateparam.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2466-improve-error-message-for-invalid-dateparam.yaml new file mode 100644 index 00000000000..a02b92226b4 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2466-improve-error-message-for-invalid-dateparam.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 2466 +title: "When performing a search using a date search parameter, invalid values (e.g. `date=foo`) resulted + in a confusing error message. This has been improved."
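A minimal, self-contained sketch of the behavior the changelog entry above describes; the class and method names and the use of IllegalArgumentException are illustrative assumptions (the actual change in BaseParamWithPrefix throws DataFormatException), but the idea is the same: a date parameter value that is nothing but a comparator prefix, such as "ge", is now rejected rather than silently treated as an empty range.

    // Illustrative sketch only; mirrors the idea of the guard added in BaseParamWithPrefix,
    // not the actual HAPI FHIR implementation.
    class DatePrefixSketch {

        static String extractPrefix(String theString) {
            int offset = 0;
            // Scan past leading comparator letters such as "gt", "ge", "lt", "le", "eq"
            while (offset < theString.length() && Character.isLetter(theString.charAt(offset))) {
                offset++;
            }
            // A value that is nothing but a prefix (e.g. "ge") now fails fast with a clear message
            if (offset > 0 && theString.length() == offset) {
                throw new IllegalArgumentException("Invalid date/time format: \"" + theString + "\"");
            }
            return theString.substring(0, offset);
        }

        public static void main(String[] args) {
            System.out.println(extractPrefix("ge2021-03-11")); // prints "ge"
            extractPrefix("ge"); // throws: Invalid date/time format: "ge"
        }
    }

At the server level this surfaces as an HTTP 400 response carrying that message, which is what the ResourceProviderR4Test additions below assert.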
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java index d61f84b905e..f7675d5566f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java @@ -333,6 +333,32 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { } + @Test + public void testSearchWithDateInvalid() throws IOException { + HttpGet get = new HttpGet(ourServerBase + "/Condition?onset-date=junk"); + try (CloseableHttpResponse resp = ourHttpClient.execute(get)) { + String output = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8); + assertThat(output, containsString("Invalid date/time format: "junk"")); + assertEquals(400, resp.getStatusLine().getStatusCode()); + } + + get = new HttpGet(ourServerBase + "/Condition?onset-date=ge"); + try (CloseableHttpResponse resp = ourHttpClient.execute(get)) { + String output = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8); + assertThat(output, containsString("Invalid date/time format: "ge"")); + assertEquals(400, resp.getStatusLine().getStatusCode()); + } + + get = new HttpGet(ourServerBase + "/Condition?onset-date=" + UrlUtil.escapeUrlParam(">")); + try (CloseableHttpResponse resp = ourHttpClient.execute(get)) { + String output = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8); + assertThat(output, containsString("Invalid date/time format: ">"")); + assertEquals(400, resp.getStatusLine().getStatusCode()); + } + + } + + @Test public void testSearchWithSlashes() { myDaoConfig.setSearchPreFetchThresholds(Lists.newArrayList(10, 50, 10000));