begin with failing test

Ken Stevens 2021-02-18 21:47:43 -05:00
parent 3eda19c65a
commit 70b2b83d1a
9 changed files with 205 additions and 59 deletions

ca/uhn/fhir/jpa/bulk/api/BulkDataExportOptions.java (new file)

@@ -0,0 +1,34 @@
+package ca.uhn.fhir.jpa.bulk.api;
+
+import java.util.Date;
+import java.util.Set;
+
+public class BulkDataExportOptions {
+   private final String myOutputFormat;
+   private final Set<String> myResourceTypes;
+   private final Date mySince;
+   private final Set<String> myFilters;
+
+   public BulkDataExportOptions(String theOutputFormat, Set<String> theResourceTypes, Date theSince, Set<String> theFilters) {
+      myOutputFormat = theOutputFormat;
+      myResourceTypes = theResourceTypes;
+      mySince = theSince;
+      myFilters = theFilters;
+   }
+
+   public String getOutputFormat() {
+      return myOutputFormat;
+   }
+
+   public Set<String> getResourceTypes() {
+      return myResourceTypes;
+   }
+
+   public Date getSince() {
+      return mySince;
+   }
+
+   public Set<String> getFilters() {
+      return myFilters;
+   }
+}
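BulkDataExportOptions is a plain immutable parameter object: it bundles the four values that submitJob() previously took as separate arguments. A minimal sketch of the new call shape (argument values borrowed from the tests later in this commit; Sets is Guava's com.google.common.collect.Sets, which those tests already use):

// Old shape: submitJob(outputFormat, resourceTypes, since, filters)
// New shape: the same four values travel in one options object.
BulkDataExportOptions options = new BulkDataExportOptions(
   Constants.CT_FHIR_NDJSON,                  // output format; a blank value falls back to ndjson
   Sets.newHashSet("Patient", "Observation"), // resource types to export
   null,                                      // no _since cutoff
   null);                                     // no _typeFilter values
IBulkDataExportSvc.JobInfo job = myBulkDataExportSvc.submitJob(options);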

ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java

@@ -27,7 +27,6 @@ import javax.transaction.Transactional;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
-import java.util.Set;
 public interface IBulkDataExportSvc {
    void buildExportFiles();
@@ -35,7 +34,7 @@ public interface IBulkDataExportSvc {
    @Transactional(value = Transactional.TxType.NEVER)
    void purgeExpiredFiles();
-   JobInfo submitJob(String theOutputFormat, Set<String> theResourceTypes, Date theSince, Set<String> theFilters);
+   JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions);
    JobInfo getJobInfoOrThrowResourceNotFound(String theJobId);

ca/uhn/fhir/jpa/bulk/job/CreateBulkExportEntityTasklet.java

@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job;
  * #L%
  */
+import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
 import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
 import ca.uhn.fhir.rest.api.Constants;
 import org.apache.commons.lang3.StringUtils;
@@ -65,7 +66,7 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
       outputFormat = Constants.CT_FHIR_NDJSON;
    }
-   IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(outputFormat, resourceTypeSet, since, filterSet);
+   IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypeSet, since, filterSet));
    addUUIDToJobContext(theChunkContext, jobInfo.getJobId());
    return RepeatStatus.FINISHED;

ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java

@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.bulk.provider;
  */
 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
 import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
@@ -98,7 +99,7 @@ public class BulkDataExportProvider {
       filters = ArrayUtil.commaSeparatedListToCleanSet(theTypeFilter.getValueAsString());
    }
-   IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(outputFormat, resourceTypes, since, filters);
+   IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(outputFormat, resourceTypes, since, filters));
    String serverBase = getServerBase(theRequestDetails);
    String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + outcome.getJobId();

BulkDataExportSvcImpl.java

@@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
 import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
@@ -229,28 +230,28 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 @Transactional
 @Override
-public JobInfo submitJob(String theOutputFormat, Set<String> theResourceTypes, Date theSince, Set<String> theFilters) {
+public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions) {
    String outputFormat = Constants.CT_FHIR_NDJSON;
-   if (isNotBlank(theOutputFormat)) {
-      outputFormat = theOutputFormat;
+   if (isNotBlank(theBulkDataExportOptions.getOutputFormat())) {
+      outputFormat = theBulkDataExportOptions.getOutputFormat();
    }
    if (!Constants.CTS_NDJSON.contains(outputFormat)) {
-      throw new InvalidRequestException("Invalid output format: " + theOutputFormat);
+      throw new InvalidRequestException("Invalid output format: " + theBulkDataExportOptions.getOutputFormat());
    }
    StringBuilder requestBuilder = new StringBuilder();
    requestBuilder.append("/").append(JpaConstants.OPERATION_EXPORT);
    requestBuilder.append("?").append(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT).append("=").append(escapeUrlParam(outputFormat));
-   Set<String> resourceTypes = theResourceTypes;
+   Set<String> resourceTypes = theBulkDataExportOptions.getResourceTypes();
    if (resourceTypes != null) {
      requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE).append("=").append(String.join(",", escapeUrlParams(resourceTypes)));
    }
-   Date since = theSince;
+   Date since = theBulkDataExportOptions.getSince();
    if (since != null) {
      requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_SINCE).append("=").append(new InstantType(since).setTimeZoneZulu(true).getValueAsString());
    }
-   if (theFilters != null && theFilters.size() > 0) {
-      requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(String.join(",", escapeUrlParams(theFilters)));
+   if (theBulkDataExportOptions.getFilters() != null && theBulkDataExportOptions.getFilters().size() > 0) {
+      requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(String.join(",", escapeUrlParams(theBulkDataExportOptions.getFilters())));
    }
    String request = requestBuilder.toString();
@@ -291,7 +292,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
    // Validate types
    validateTypes(resourceTypes);
-   validateTypeFilters(theFilters, resourceTypes);
+   validateTypeFilters(theBulkDataExportOptions.getFilters(), resourceTypes);
    updateExpiry(job);
    myBulkExportJobDao.save(job);
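For a concrete sense of what this method builds: with options of (null, {"Patient", "Observation"}, null, null), the format defaults to ndjson and the canonical request string comes out roughly as

/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient

(the _type ordering follows the set's iteration order, so it is not guaranteed). That canonical string is what allows a second submission with the same options to be matched to an existing job, which appears to be the mechanism behind testSubmit_ReusesExisting below.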

TermLoaderSvcImpl.java

@@ -8,7 +8,24 @@ import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
 import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
-import ca.uhn.fhir.jpa.term.loinc.*;
+import ca.uhn.fhir.jpa.term.loinc.LoincAnswerListHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincAnswerListLinkHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincDocumentOntologyHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincGroupFileHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincGroupTermsFileHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincHierarchyHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincIeeeMedicalDeviceCodeHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincImagingDocumentCodeHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincParentGroupFileHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincPartHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincPartLinkHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincPartRelatedCodeMappingHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincRsnaPlaybookHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincTop2000LabResultsSiHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincTop2000LabResultsUsHandler;
+import ca.uhn.fhir.jpa.term.loinc.LoincUniversalOrderSetHandler;
+import ca.uhn.fhir.jpa.term.loinc.PartTypeAndPartName;
 import ca.uhn.fhir.jpa.term.snomedct.SctHandlerConcept;
 import ca.uhn.fhir.jpa.term.snomedct.SctHandlerDescription;
 import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship;
@@ -38,12 +55,66 @@ import org.springframework.beans.factory.annotation.Autowired;
 import javax.annotation.Nonnull;
 import javax.validation.constraints.NotNull;
-import java.io.*;
-import java.util.*;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.Properties;
+import java.util.Set;
 import java.util.stream.Collectors;
-import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_LINK_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_LINK_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_CODESYSTEM_VERSION;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_DOCUMENT_ONTOLOGY_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_TERMS_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_TERMS_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_HIERARCHY_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_HIERARCHY_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IMAGING_DOCUMENT_CODES_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PARENT_GROUP_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PARENT_GROUP_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_PRIMARY;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_PRIMARY_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_SUPPLEMENTARY;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_RELATED_CODE_MAPPING_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_RSNA_PLAYBOOK_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_RSNA_PLAYBOOK_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT;
+import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UPLOAD_PROPERTIES_FILE;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 /*
@@ -592,7 +663,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
 Counter circularCounter = new Counter();
 for (TermConcept next : rootConcepts.values()) {
-   long count = circularCounter.getThenAdd();
+   long count = circularCounter.getAndAdd();
    float pct = ((float) count / rootConcepts.size()) * 100.0f;
    ourLog.info(" * Scanning for circular refs - have scanned {} / {} codes ({}%)", count, rootConcepts.size(), pct);
    dropCircularRefs(next, new ArrayList<>(), code2concept);

Counter.java

@@ -24,7 +24,7 @@ public class Counter {
 private long myCount;
-public long getThenAdd() {
+public long getAndAdd() {
    return myCount++;
 }
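The rename (apparently to echo the java.util.concurrent.atomic naming of getAndAdd/getAndIncrement) does not change behavior: the body is a plain post-increment, so each call returns the value from before the bump. A tiny sketch of the contract:

Counter counter = new Counter();
long first = counter.getAndAdd();  // returns 0; internal count becomes 1
long second = counter.getAndAdd(); // returns 1; internal count becomes 2

Unlike its AtomicLong namesakes it is not synchronized, which is fine for the single-threaded circular-reference scan in TermLoaderSvcImpl above.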

ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java

@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.bulk;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
 import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson;
 import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
@@ -41,8 +42,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import java.io.IOException;
-import java.util.Date;
-import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import static org.hamcrest.MatcherAssert.assertThat;
@@ -61,6 +60,8 @@ public class BulkDataExportProviderTest {
 private static final String A_JOB_ID = "0000000-AAAAAA";
 private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportProviderTest.class);
+private static final String GROUP_ID = "G2401";
+private static final String G_JOB_ID = "0000000-GGGGGG";
 private Server myServer;
 private FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4);
 private int myPort;
@@ -68,13 +69,7 @@ public class BulkDataExportProviderTest {
 private IBulkDataExportSvc myBulkDataExportSvc;
 private CloseableHttpClient myClient;
 @Captor
-private ArgumentCaptor<String> myOutputFormatCaptor;
-@Captor
-private ArgumentCaptor<Set<String>> myResourceTypesCaptor;
-@Captor
-private ArgumentCaptor<Date> mySinceCaptor;
-@Captor
-private ArgumentCaptor<Set<String>> myFiltersCaptor;
+private ArgumentCaptor<BulkDataExportOptions> myBulkDataExportOptionsCaptor;
 @AfterEach
 public void after() throws Exception {
@@ -111,7 +106,7 @@
 IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
    .setJobId(A_JOB_ID);
-when(myBulkDataExportSvc.submitJob(any(), any(), any(), any())).thenReturn(jobInfo);
+when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo);
 InstantType now = InstantType.now();
@@ -135,12 +130,12 @@
 assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
 }
-verify(myBulkDataExportSvc, times(1)).submitJob(myOutputFormatCaptor.capture(), myResourceTypesCaptor.capture(), mySinceCaptor.capture(), myFiltersCaptor.capture());
-assertEquals(Constants.CT_FHIR_NDJSON, myOutputFormatCaptor.getValue());
-assertThat(myResourceTypesCaptor.getValue(), containsInAnyOrder("Patient", "Practitioner"));
-assertThat(mySinceCaptor.getValue(), notNullValue());
-assertThat(myFiltersCaptor.getValue(), containsInAnyOrder("Patient?identifier=foo"));
+verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture());
+BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
+assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
+assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
+assertThat(options.getSince(), notNullValue());
+assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo"));
 }
 @Test
@@ -148,7 +143,7 @@
 IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
    .setJobId(A_JOB_ID);
-when(myBulkDataExportSvc.submitJob(any(), any(), any(), any())).thenReturn(jobInfo);
+when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo);
 InstantType now = InstantType.now();
@@ -169,12 +164,12 @@
 assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
 }
-verify(myBulkDataExportSvc, times(1)).submitJob(myOutputFormatCaptor.capture(), myResourceTypesCaptor.capture(), mySinceCaptor.capture(), myFiltersCaptor.capture());
-assertEquals(Constants.CT_FHIR_NDJSON, myOutputFormatCaptor.getValue());
-assertThat(myResourceTypesCaptor.getValue(), containsInAnyOrder("Patient", "Practitioner"));
-assertThat(mySinceCaptor.getValue(), notNullValue());
-assertThat(myFiltersCaptor.getValue(), containsInAnyOrder("Patient?identifier=foo"));
+verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture());
+BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
+assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
+assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
+assertThat(options.getSince(), notNullValue());
+assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo"));
 }
 @Test
@@ -286,5 +281,48 @@
 }
+/**
+ * Group export tests
+ * See https://build.fhir.org/ig/HL7/us-bulk-data/
+ * <p>
+ * GET [fhir base]/Group/[id]/$export
+ * <p>
+ * FHIR Operation to obtain data on all patients listed in a single FHIR Group Resource.
+ */
+@Test
+public void testSuccessfulInitiateGroupBulkRequest_Post() throws IOException {
+   IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo().setJobId(G_JOB_ID);
+   when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo);
+   InstantType now = InstantType.now();
+   Parameters input = new Parameters();
+   input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
+   input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Observation, DiagnosticReport"));
+   input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now);
+   input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Observation?code=OBSCODE,DiagnosticReport?code=DRCODE"));
+   ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
+   HttpPost post = new HttpPost("http://localhost:" + myPort + "/Group/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT);
+   post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
+   post.setEntity(new ResourceEntity(myCtx, input));
+   ourLog.info("Request: {}", post);
+   try (CloseableHttpResponse response = myClient.execute(post)) {
+      ourLog.info("Response: {}", response.toString());
+      assertEquals(202, response.getStatusLine().getStatusCode());
+      assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
+      assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+   }
+   verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture());
+   BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
+   assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
+   assertThat(options.getResourceTypes(), containsInAnyOrder("Observation", "DiagnosticReport"));
+   assertThat(options.getSince(), notNullValue());
+   assertThat(options.getFilters(), containsInAnyOrder("Observation?code=OBSCODE", "DiagnosticReport?code=DRCODE"));
+}
 }
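The new test drives the POST form of the group-level operation; the javadoc quotes the GET form from the bulk-data IG. With the same parameters, the GET equivalent would look roughly like this (a sketch; query values URL-encoded as needed):

GET [fhir base]/Group/G2401/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,DiagnosticReport&_typeFilter=Observation%3Fcode%3DOBSCODE,DiagnosticReport%3Fcode%3DDRCODE
Prefer: respond-async

In either form the server is expected to answer 202 Accepted with a Content-Location header pointing at the $export-poll-status endpoint, which is exactly what the assertions above check.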

ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java

@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.bulk;
 import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
 import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.bulk.job.BulkExportJobParametersBuilder;
 import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
@@ -134,7 +135,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 @Test
 public void testSubmit_InvalidOutputFormat() {
    try {
-      myBulkDataExportSvc.submitJob(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Patient", "Observation"), null, null);
+      myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Patient", "Observation"), null, null));
       fail();
    } catch (InvalidRequestException e) {
       assertEquals("Invalid output format: application/fhir+json", e.getMessage());
@@ -144,7 +145,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 @Test
 public void testSubmit_OnlyBinarySelected() {
    try {
-      myBulkDataExportSvc.submitJob(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Binary"), null, null);
+      myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Binary"), null, null));
       fail();
    } catch (InvalidRequestException e) {
       assertEquals("Invalid output format: application/fhir+json", e.getMessage());
@@ -154,7 +155,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 @Test
 public void testSubmit_InvalidResourceTypes() {
    try {
-      myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient", "FOO"), null, null);
+      myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient", "FOO"), null, null));
       fail();
    } catch (InvalidRequestException e) {
       assertEquals("Unknown or unsupported resource type: FOO", e.getMessage());
@@ -164,7 +165,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 @Test
 public void testSubmit_MultipleTypeFiltersForSameType() {
    try {
-      myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Patient?name=a", "Patient?active=true"));
+      myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Patient?name=a", "Patient?active=true")));
       fail();
    } catch (InvalidRequestException e) {
       assertEquals("Invalid _typeFilter value \"Patient?name=a\". Multiple filters found for type Patient", e.getMessage());
@@ -174,7 +175,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 @Test
 public void testSubmit_TypeFilterForNonSelectedType() {
    try {
-      myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Observation?code=123"));
+      myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Observation?code=123")));
       fail();
    } catch (InvalidRequestException e) {
       assertEquals("Invalid _typeFilter value \"Observation?code=123\". Resource type does not appear in _type list", e.getMessage());
@@ -184,7 +185,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 @Test
 public void testSubmit_TypeFilterInvalid() {
    try {
-      myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Hello"));
+      myBulkDataExportSvc.submitJob(new BulkDataExportOptions(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient"), null, Sets.newHashSet("Hello")));
       fail();
    } catch (InvalidRequestException e) {
       assertEquals("Invalid _typeFilter value \"Hello\". Must be in the form [ResourceType]?[params]", e.getMessage());
@@ -195,11 +196,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 public void testSubmit_ReusesExisting() {
    // Submit
-   IBulkDataExportSvc.JobInfo jobDetails1 = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null);
+   IBulkDataExportSvc.JobInfo jobDetails1 = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null));
    assertNotNull(jobDetails1.getJobId());
    // Submit again
-   IBulkDataExportSvc.JobInfo jobDetails2 = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null);
+   IBulkDataExportSvc.JobInfo jobDetails2 = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null));
    assertNotNull(jobDetails2.getJobId());
    assertEquals(jobDetails1.getJobId(), jobDetails2.getJobId());
@@ -220,7 +221,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
    createResources();
    // Create a bulk job
-   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient"), null, null);
+   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient"), null, null));
    assertNotNull(jobDetails.getJobId());
    // Check the status
@@ -250,7 +251,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
    createResources();
    // Create a bulk job
-   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, Sets.newHashSet(TEST_FILTER));
+   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, Sets.newHashSet(TEST_FILTER)));
    assertNotNull(jobDetails.getJobId());
    // Check the status
@@ -303,7 +304,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
    myBinaryDao.create(b);
    // Create a bulk job
-   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, null, null, null);
+   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, null, null, null));
    assertNotNull(jobDetails.getJobId());
    // Check the status
@@ -350,7 +351,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
    // Create a bulk job
    HashSet<String> types = Sets.newHashSet("Patient");
    Set<String> typeFilters = Sets.newHashSet("Patient?_has:Observation:patient:identifier=SYS|VAL3");
-   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, types, null, typeFilters);
+   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, types, null, typeFilters));
    assertNotNull(jobDetails.getJobId());
    // Check the status
@@ -402,7 +403,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
    }
    // Create a bulk job
-   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), cutoff.getValue(), null);
+   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), cutoff.getValue(), null));
    assertNotNull(jobDetails.getJobId());
    // Check the status
@@ -480,7 +481,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
    createResources();
    // Create a bulk job
-   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null);
+   IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new BulkDataExportOptions(null, Sets.newHashSet("Patient", "Observation"), null, null));
    //Add the UUID to the job
    BulkExportJobParametersBuilder paramBuilder = new BulkExportJobParametersBuilder()