From 70b2b83d1ab2e253e1be8fe1f30dfe72973beda6 Mon Sep 17 00:00:00 2001 From: Ken Stevens Date: Thu, 18 Feb 2021 21:47:43 -0500 Subject: [PATCH] begin with failing test --- .../jpa/bulk/api/BulkDataExportOptions.java | 34 ++++++++ .../fhir/jpa/bulk/api/IBulkDataExportSvc.java | 3 +- .../job/CreateBulkExportEntityTasklet.java | 5 +- .../bulk/provider/BulkDataExportProvider.java | 3 +- .../jpa/bulk/svc/BulkDataExportSvcImpl.java | 19 +++-- .../uhn/fhir/jpa/term/TermLoaderSvcImpl.java | 81 ++++++++++++++++-- .../java/ca/uhn/fhir/jpa/util/Counter.java | 6 +- .../jpa/bulk/BulkDataExportProviderTest.java | 84 ++++++++++++++----- .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 29 +++---- 9 files changed, 205 insertions(+), 59 deletions(-) create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java new file mode 100644 index 00000000000..007cd78c97b --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java @@ -0,0 +1,34 @@ +package ca.uhn.fhir.jpa.bulk.api; + +import java.util.Date; +import java.util.Set; + +public class BulkDataExportOptions { + private final String myOutputFormat; + private final Set<String> myResourceTypes; + private final Date mySince; + private final Set<String> myFilters; + + public BulkDataExportOptions(String theOutputFormat, Set<String> theResourceTypes, Date theSince, Set<String> theFilters) { + myOutputFormat = theOutputFormat; + myResourceTypes = theResourceTypes; + mySince = theSince; + myFilters = theFilters; + } + + public String getOutputFormat() { + return myOutputFormat; + } + + public Set<String> getResourceTypes() { + return myResourceTypes; + } + + public Date getSince() { + return mySince; + } + + public Set<String> getFilters() { + return myFilters; + } +} diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java index ca4c07c474a..3784fc8a18d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java @@ -27,7 +27,6 @@ import javax.transaction.Transactional; import java.util.ArrayList; import java.util.Date; import java.util.List; -import java.util.Set; public interface IBulkDataExportSvc { void buildExportFiles(); @@ -35,7 +34,7 @@ public interface IBulkDataExportSvc { @Transactional(value = Transactional.TxType.NEVER) void purgeExpiredFiles(); - JobInfo submitJob(String theOutputFormat, Set theResourceTypes, Date theSince, Set theFilters); + JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions); JobInfo getJobInfoOrThrowResourceNotFound(String theJobId); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java index 208e84cdfe2..e956904428e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job; * #L% */ +import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.rest.api.Constants; import org.apache.commons.lang3.StringUtils; @@ -60,12 +61,12 @@ public class CreateBulkExportEntityTasklet implements Tasklet { } Set resourceTypeSet = Arrays.stream(resourceTypes.split(",")).collect(Collectors.toSet()); - String outputFormat = (String)jobParameters.get("outputFormat"); + String outputFormat = (String) 
jobParameters.get("outputFormat"); if (StringUtils.isBlank(outputFormat)) { outputFormat = Constants.CT_FHIR_NDJSON; } - IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(outputFormat, resourceTypeSet, since, filterSet); + IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypeSet, since, filterSet)); addUUIDToJobContext(theChunkContext, jobInfo.getJobId()); return RepeatStatus.FINISHED; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java index c06529f4c72..fc52ad767cc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.bulk.provider; */ import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson; import ca.uhn.fhir.jpa.model.util.JpaConstants; @@ -98,7 +99,7 @@ public class BulkDataExportProvider { filters = ArrayUtil.commaSeparatedListToCleanSet(theTypeFilter.getValueAsString()); } - IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(outputFormat, resourceTypes, since, filters); + IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypes, since, filters)); String serverBase = getServerBase(theRequestDetails); String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" 
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + outcome.getJobId(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java index 9b08c93c44f..8a28665c589 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java @@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; +import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao; @@ -229,28 +230,28 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { @Transactional @Override - public JobInfo submitJob(String theOutputFormat, Set theResourceTypes, Date theSince, Set theFilters) { + public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions) { String outputFormat = Constants.CT_FHIR_NDJSON; - if (isNotBlank(theOutputFormat)) { - outputFormat = theOutputFormat; + if (isNotBlank(theBulkDataExportOptions.getOutputFormat())) { + outputFormat = theBulkDataExportOptions.getOutputFormat(); } if (!Constants.CTS_NDJSON.contains(outputFormat)) { - throw new InvalidRequestException("Invalid output format: " + theOutputFormat); + throw new InvalidRequestException("Invalid output format: " + theBulkDataExportOptions.getOutputFormat()); } StringBuilder requestBuilder = new StringBuilder(); requestBuilder.append("/").append(JpaConstants.OPERATION_EXPORT); requestBuilder.append("?").append(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT).append("=").append(escapeUrlParam(outputFormat)); - Set resourceTypes = 
theResourceTypes; + Set resourceTypes = theBulkDataExportOptions.getResourceTypes(); if (resourceTypes != null) { requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE).append("=").append(String.join(",", escapeUrlParams(resourceTypes))); } - Date since = theSince; + Date since = theBulkDataExportOptions.getSince(); if (since != null) { requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_SINCE).append("=").append(new InstantType(since).setTimeZoneZulu(true).getValueAsString()); } - if (theFilters != null && theFilters.size() > 0) { - requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(String.join(",", escapeUrlParams(theFilters))); + if (theBulkDataExportOptions.getFilters() != null && theBulkDataExportOptions.getFilters().size() > 0) { + requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(String.join(",", escapeUrlParams(theBulkDataExportOptions.getFilters()))); } String request = requestBuilder.toString(); @@ -291,7 +292,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc { // Validate types validateTypes(resourceTypes); - validateTypeFilters(theFilters, resourceTypes); + validateTypeFilters(theBulkDataExportOptions.getFilters(), resourceTypes); updateExpiry(job); myBulkExportJobDao.save(job); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java index a40491f13cd..dbde4943c9e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java @@ -8,7 +8,24 @@ import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet; -import 
ca.uhn.fhir.jpa.term.loinc.*; +import ca.uhn.fhir.jpa.term.loinc.LoincAnswerListHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincAnswerListLinkHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincDocumentOntologyHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincGroupFileHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincGroupTermsFileHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincHierarchyHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincIeeeMedicalDeviceCodeHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincImagingDocumentCodeHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincParentGroupFileHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincPartHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincPartLinkHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincPartRelatedCodeMappingHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincRsnaPlaybookHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincTop2000LabResultsSiHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincTop2000LabResultsUsHandler; +import ca.uhn.fhir.jpa.term.loinc.LoincUniversalOrderSetHandler; +import ca.uhn.fhir.jpa.term.loinc.PartTypeAndPartName; import ca.uhn.fhir.jpa.term.snomedct.SctHandlerConcept; import ca.uhn.fhir.jpa.term.snomedct.SctHandlerDescription; import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship; @@ -38,12 +55,66 @@ import org.springframework.beans.factory.annotation.Autowired; import javax.annotation.Nonnull; import javax.validation.constraints.NotNull; -import java.io.*; -import java.util.*; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.LineNumberReader; +import java.io.Reader; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; +import 
java.util.Properties; +import java.util.Set; import java.util.stream.Collectors; -import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_LINK_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_LINK_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_CODESYSTEM_VERSION; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_DOCUMENT_ONTOLOGY_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_TERMS_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_TERMS_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_HIERARCHY_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_HIERARCHY_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IMAGING_DOCUMENT_CODES_FILE; +import static 
ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PARENT_GROUP_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PARENT_GROUP_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_PRIMARY; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_PRIMARY_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_SUPPLEMENTARY; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_RELATED_CODE_MAPPING_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_RSNA_PLAYBOOK_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_RSNA_PLAYBOOK_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT; +import static 
ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UPLOAD_PROPERTIES_FILE; import static org.apache.commons.lang3.StringUtils.isNotBlank; /* @@ -592,7 +663,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { Counter circularCounter = new Counter(); for (TermConcept next : rootConcepts.values()) { - long count = circularCounter.getThenAdd(); + long count = circularCounter.getThenAdd(); float pct = ((float) count / rootConcepts.size()) * 100.0f; ourLog.info(" * Scanning for circular refs - have scanned {} / {} codes ({}%)", count, rootConcepts.size(), pct); dropCircularRefs(next, new ArrayList<>(), code2concept); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/Counter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/Counter.java index 05a546d1589..ca10f7ad5b6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/Counter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/Counter.java @@ -23,9 +23,9 @@ package ca.uhn.fhir.jpa.util; public class Counter { private long myCount; - - public long getThenAdd() { + + public long getThenAdd() { return myCount++; } - + } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 6ea7548091a..9374372c152 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.bulk; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import 
ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson; import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; @@ -41,8 +42,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; -import java.util.Date; -import java.util.Set; import java.util.concurrent.TimeUnit; import static org.hamcrest.MatcherAssert.assertThat; @@ -61,6 +60,8 @@ public class BulkDataExportProviderTest { private static final String A_JOB_ID = "0000000-AAAAAA"; private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportProviderTest.class); + private static final String GROUP_ID = "G2401"; + private static final String G_JOB_ID = "0000000-GGGGGG"; private Server myServer; private FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4); private int myPort; @@ -68,13 +69,7 @@ public class BulkDataExportProviderTest { private IBulkDataExportSvc myBulkDataExportSvc; private CloseableHttpClient myClient; @Captor - private ArgumentCaptor myOutputFormatCaptor; - @Captor - private ArgumentCaptor> myResourceTypesCaptor; - @Captor - private ArgumentCaptor mySinceCaptor; - @Captor - private ArgumentCaptor> myFiltersCaptor; + private ArgumentCaptor myBulkDataExportOptionsCaptor; @AfterEach public void after() throws Exception { @@ -111,7 +106,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID); - when(myBulkDataExportSvc.submitJob(any(), any(), any(), any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); InstantType now = InstantType.now(); @@ -135,12 +130,12 @@ public class BulkDataExportProviderTest { assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } - verify(myBulkDataExportSvc, 
times(1)).submitJob(myOutputFormatCaptor.capture(), myResourceTypesCaptor.capture(), mySinceCaptor.capture(), myFiltersCaptor.capture()); - assertEquals(Constants.CT_FHIR_NDJSON, myOutputFormatCaptor.getValue()); - assertThat(myResourceTypesCaptor.getValue(), containsInAnyOrder("Patient", "Practitioner")); - assertThat(mySinceCaptor.getValue(), notNullValue()); - assertThat(myFiltersCaptor.getValue(), containsInAnyOrder("Patient?identifier=foo")); - + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); + assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); + assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); + assertThat(options.getSince(), notNullValue()); + assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo")); } @Test @@ -148,7 +143,7 @@ public class BulkDataExportProviderTest { IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .setJobId(A_JOB_ID); - when(myBulkDataExportSvc.submitJob(any(), any(), any(), any())).thenReturn(jobInfo); + when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); InstantType now = InstantType.now(); @@ -169,12 +164,12 @@ public class BulkDataExportProviderTest { assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } - verify(myBulkDataExportSvc, times(1)).submitJob(myOutputFormatCaptor.capture(), myResourceTypesCaptor.capture(), mySinceCaptor.capture(), myFiltersCaptor.capture()); - assertEquals(Constants.CT_FHIR_NDJSON, myOutputFormatCaptor.getValue()); - assertThat(myResourceTypesCaptor.getValue(), containsInAnyOrder("Patient", "Practitioner")); - assertThat(mySinceCaptor.getValue(), notNullValue()); - assertThat(myFiltersCaptor.getValue(), containsInAnyOrder("Patient?identifier=foo")); - + 
verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); + assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); + assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); + assertThat(options.getSince(), notNullValue()); + assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo")); } @Test @@ -286,5 +281,48 @@ public class BulkDataExportProviderTest { } + /** + * Group export tests + * See https://build.fhir.org/ig/HL7/us-bulk-data/ + *
<pre>
+ * GET [fhir base]/Group/[id]/$export + *
</pre>
+ * FHIR Operation to obtain data on all patients listed in a single FHIR Group Resource. + */ + @Test + public void testSuccessfulInitiateGroupBulkRequest_Post() throws IOException { + + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo().setJobId(G_JOB_ID); + when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo); + + InstantType now = InstantType.now(); + + Parameters input = new Parameters(); + input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Observation, DiagnosticReport")); + input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Observation?code=OBSCODE,DiagnosticReport?code=DRCODE")); + + ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); + + HttpPost post = new HttpPost("http://localhost:" + myPort + "/Group/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT); + post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + post.setEntity(new ResourceEntity(myCtx, input)); + ourLog.info("Request: {}", post); + try (CloseableHttpResponse response = myClient.execute(post)) { + ourLog.info("Response: {}", response.toString()); + + assertEquals(202, response.getStatusLine().getStatusCode()); + assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); + assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + } + + verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture()); + BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); + assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); + assertThat(options.getResourceTypes(), containsInAnyOrder("Observation", "DiagnosticReport")); + 
assertThat(options.getSince(), notNullValue()); + assertThat(options.getFilters(), containsInAnyOrder("Observation?code=OBSCODE", "DiagnosticReport?code=DRCODE")); + } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java index 14f0fb51261..16c055d8aed 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.bulk; import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; +import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.job.BulkExportJobParametersBuilder; import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; @@ -134,7 +135,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_InvalidOutputFormat() { try { - myBulkDataExportSvc.submitJob(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Patient", "Observation"), null, null); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Patient", "Observation"), null, null)); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid output format: application/fhir+json", e.getMessage()); @@ -144,7 +145,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_OnlyBinarySelected() { try { - myBulkDataExportSvc.submitJob(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Binary"), null, null); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Binary"), null, null)); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid 
output format: application/fhir+json", e.getMessage()); @@ -154,7 +155,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_InvalidResourceTypes() { try { - myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient", "FOO"), null, null); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient", "FOO"), null, null)); fail(); } catch (InvalidRequestException e) { assertEquals("Unknown or unsupported resource type: FOO", e.getMessage()); @@ -164,7 +165,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_MultipleTypeFiltersForSameType() { try { - myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Patient?name=a", "Patient?active=true")); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Patient?name=a", "Patient?active=true"))); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid _typeFilter value \"Patient?name=a\". Multiple filters found for type Patient", e.getMessage()); @@ -174,7 +175,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_TypeFilterForNonSelectedType() { try { - myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Observation?code=123")); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Observation?code=123"))); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid _typeFilter value \"Observation?code=123\". 
Resource type does not appear in _type list", e.getMessage()); @@ -184,7 +185,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { @Test public void testSubmit_TypeFilterInvalid() { try { - myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Hello")); + myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Hello"))); fail(); } catch (InvalidRequestException e) { assertEquals("Invalid _typeFilter value \"Hello\". Must be in the form [ResourceType]?[params]", e.getMessage()); @@ -195,11 +196,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { public void testSubmit_ReusesExisting() { // Submit - IBulkDataExportSvc.JobInfo jobDetails1 = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null); + IBulkDataExportSvc.JobInfo jobDetails1 = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null)); assertNotNull(jobDetails1.getJobId()); // Submit again - IBulkDataExportSvc.JobInfo jobDetails2 = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null); + IBulkDataExportSvc.JobInfo jobDetails2 = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null)); assertNotNull(jobDetails2.getJobId()); assertEquals(jobDetails1.getJobId(), jobDetails2.getJobId()); @@ -220,7 +221,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient"), null, null); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient"), null, null)); assertNotNull(jobDetails.getJobId()); // Check the status @@ -250,7 +251,7 @@ public class 
BulkDataExportSvcImplR4Test extends BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, Sets.newHashSet(TEST_FILTER)); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, Sets.newHashSet(TEST_FILTER))); assertNotNull(jobDetails.getJobId()); // Check the status @@ -303,7 +304,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { myBinaryDao.create(b); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, null, null, null); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, null, null, null)); assertNotNull(jobDetails.getJobId()); // Check the status @@ -350,7 +351,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { // Create a bulk job HashSet types = Sets.newHashSet("Patient"); Set typeFilters = Sets.newHashSet("Patient?_has:Observation:patient:identifier=SYS|VAL3"); - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, types, null, typeFilters); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, types, null, typeFilters)); assertNotNull(jobDetails.getJobId()); // Check the status @@ -402,7 +403,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { } // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), cutoff.getValue(), null); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), cutoff.getValue(), null)); assertNotNull(jobDetails.getJobId()); // Check the status @@ -480,7 +481,7 @@ public class BulkDataExportSvcImplR4Test extends 
BaseJpaR4Test { createResources(); // Create a bulk job - IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null); + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null)); //Add the UUID to the job BulkExportJobParametersBuilder paramBuilder = new BulkExportJobParametersBuilder()