4030 bulk export to support FHIR `[fhir base]/Patient/[id]/$export` endpoint (#4032)

* Refactored test helper method to be more generic, added new failing test for feature, and changed BulkDataExportOptions to fix compile error

* added field to all objects needed in the bulk export pipeline for adding patient id, added new operation for the feature

* added changelog

* added support for patient parameter to patient export, changed patient id to a list to match the requirements for the patient parameter, added test for multiple id patient export

* refactored methods, added new changes to changelog

* code review changes

* fix test error

Co-authored-by: Steven Li <steven@smilecdr.com>
This commit is contained in:
StevenXLi 2022-09-20 13:42:55 -04:00 committed by GitHub
parent 4d16a109de
commit 3b781022c1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 291 additions and 34 deletions

View File

@ -0,0 +1,7 @@
---
type: add
issue: 4030
title: "Previously, Patient Bulk Export only supported endpoint [fhir base]/Patient/$export, which exports all patients.
Now, Patient Export can be done at the instance level, following this format: `[fhir base]/Patient/[id]/$export`, which will export only the records for one patient.
Additionally, added support for the `patient` parameter in Patient Bulk Export, which is another way to get the records of only one patient."

View File

@ -64,6 +64,7 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@ -75,6 +76,7 @@ import java.util.Set;
import java.util.stream.Collectors;
import static ca.uhn.fhir.rest.api.Constants.PARAM_HAS;
import static ca.uhn.fhir.rest.api.Constants.PARAM_ID;
public class JpaBulkExportProcessor implements IBulkExportProcessor {
private static final Logger ourLog = LoggerFactory.getLogger(JpaBulkExportProcessor.class);
@ -146,9 +148,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor {
ISearchBuilder searchBuilder = getSearchBuilderForResourceType(theParams.getResourceType());
if (!resourceType.equalsIgnoreCase("Patient")) {
map.add(patientSearchParam, new ReferenceParam().setMissing(false));
}
filterBySpecificPatient(theParams, resourceType, patientSearchParam, map);
SearchRuntimeDetails searchRuntime = new SearchRuntimeDetails(null, jobId);
IResultIterator resultIterator = searchBuilder.createQuery(map, searchRuntime, null, RequestPartitionId.allPartitions());
@ -159,6 +159,31 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor {
return pids;
}
/**
 * Restricts the search map to the patients requested in the export parameters, if any.
 * <p>
 * For the Patient resource itself the restriction is applied on {@code _id}; for every
 * other resource type it is applied on that type's patient search parameter. When no
 * patient ids were supplied, non-Patient resources are still required to have a
 * non-missing patient reference so that patient-less resources are excluded.
 */
private static void filterBySpecificPatient(ExportPIDIteratorParameters theParams, String resourceType, String patientSearchParam, SearchParameterMap map) {
	boolean isPatientResource = resourceType.equalsIgnoreCase("Patient");
	boolean hasRequestedPatients = theParams.getPatientIds() != null;

	if (hasRequestedPatients) {
		// Same OR-list of patient references either way; only the parameter name differs
		String paramName = isPatientResource ? PARAM_ID : patientSearchParam;
		map.add(paramName, getReferenceOrListParam(theParams));
	} else if (!isPatientResource) {
		// No specific patients requested: only include resources that actually reference a patient
		map.add(patientSearchParam, new ReferenceParam().setMissing(false));
	}
}
/**
 * Builds an OR-list reference parameter containing one entry per requested patient id.
 * Assumes {@link ExportPIDIteratorParameters#getPatientIds()} is non-null (callers check).
 */
@Nonnull
private static ReferenceOrListParam getReferenceOrListParam(ExportPIDIteratorParameters theParams) {
	ReferenceOrListParam orList = new ReferenceOrListParam();
	theParams.getPatientIds().forEach(id -> orList.addOr(new ReferenceParam(id)));
	return orList;
}
private Set<ResourcePersistentId> getPidsForSystemStyleExport(ExportPIDIteratorParameters theParams, String theJobId, RuntimeResourceDefinition theDef) {
Set<ResourcePersistentId> pids = new HashSet<>();
// System
@ -212,7 +237,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor {
/**
 * Resolves the group named in the export parameters and returns a singleton set
 * containing its persistent id.
 * <p>
 * Fix: the previous version declared the {@code pids} local twice, which does not compile.
 *
 * @param theParams export parameters carrying the group id to resolve
 * @return a set with exactly one element; NOTE(review): the element may be null if the
 *         group has no persistent id — confirm callers tolerate a null entry
 */
private Set<ResourcePersistentId> getSingletonGroupList(ExportPIDIteratorParameters theParams) {
	IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(theParams.getGroupId()), SystemRequestDetails.newSystemRequestAllPartitions());
	ResourcePersistentId pidOrNull = myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), group);
	Set<ResourcePersistentId> pids = new HashSet<>();
	pids.add(pidOrNull);
	return pids;
}
@ -329,7 +354,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor {
* This method takes an {@link SearchParameterMap} and adds a clause to it that will filter the search results to only
* return members of the defined group.
*
* @param theMap the map to add the clause to.
* @param theMap the map to add the clause to.
* @param theGroupId the group ID to filter by.
*/
private void addMembershipToGroupClause(SearchParameterMap theMap, String theGroupId) {

View File

@ -192,6 +192,10 @@ public class JpaConstants {
* Parameter for the $export operation
*/
public static final String PARAM_EXPORT_TYPE_FILTER = "_typeFilter";
/**
 * The "patient" parameter for the $export operation — restricts a Patient Bulk Export to the supplied patient id(s)
 */
public static final String PARAM_EXPORT_PATIENT = "patient";
/**
* Parameter for the $import operation

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.bulk;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
@ -11,10 +12,14 @@ import ca.uhn.fhir.util.JsonUtil;
import com.google.common.collect.Sets;
import org.hamcrest.Matchers;
import org.hl7.fhir.r4.model.Binary;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Group;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -34,9 +39,17 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
public class BulkDataExportTest extends BaseResourceProviderR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportTest.class);
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private IBatch2JobRunner myJobRunner;
@AfterEach
void afterEach() {
	// Restore missing-field indexing to DISABLED after every test, because some tests
	// in this class switch it to ENABLED — presumably DISABLED is the suite default; TODO confirm
	myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
}
@Test
public void testGroupBulkExportWithTypeFilter() {
// Create some resources
@ -59,7 +72,14 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
group.addMember().getEntity().setReference("Patient/PM");
myClient.update().resource(group).execute();
verifyBulkExportResults("G", Sets.newHashSet("Patient?gender=female"), Collections.singletonList("\"PF\""), Collections.singletonList("\"PM\""));
// set the export options
BulkDataExportOptions options = new BulkDataExportOptions();
options.setResourceTypes(Sets.newHashSet("Patient"));
options.setGroupId(new IdType("Group", "G"));
options.setFilters(Sets.newHashSet("Patient?gender=female"));
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, Collections.singletonList("\"PF\""), Collections.singletonList("\"PM\""));
}
@Test
@ -90,11 +110,18 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
group.addMember().getEntity().setReference("Patient/PING2");
myClient.update().resource(group).execute();
verifyBulkExportResults("G2", new HashSet<>(), List.of("\"PING1\"", "\"PING2\""), Collections.singletonList("\"PNING3\""));
// set the export options
BulkDataExportOptions options = new BulkDataExportOptions();
options.setResourceTypes(Sets.newHashSet("Patient"));
options.setGroupId(new IdType("Group", "G2"));
options.setFilters(new HashSet<>());
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
verifyBulkExportResults(options, List.of("\"PING1\"", "\"PING2\""), Collections.singletonList("\"PNING3\""));
}
@Test
public void testTwoBulkExportsInArow() {
public void testTwoBulkExportsInARow() {
// Create some resources
Patient patient = new Patient();
patient.setId("PING1");
@ -107,29 +134,142 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
group.setActive(true);
group.addMember().getEntity().setReference("Patient/PING1");
myClient.update().resource(group).execute();
// set the export options
BulkDataExportOptions options = new BulkDataExportOptions();
options.setResourceTypes(Sets.newHashSet("Patient"));
options.setGroupId(new IdType("Group", "G2"));
options.setFilters(new HashSet<>());
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
myCaptureQueriesListener.clear();
verifyBulkExportResults("G2", new HashSet<>(), List.of("\"PING1\""), Collections.singletonList("\"PNING3\""));
verifyBulkExportResults(options, List.of("\"PING1\""), Collections.singletonList("\"PNING3\""));
myCaptureQueriesListener.logSelectQueries();
ourLog.error("************");
myCaptureQueriesListener.clear();
try {
verifyBulkExportResults("G2", new HashSet<>(), List.of("\"PING1\""), Collections.singletonList("\"PNING3\""));
verifyBulkExportResults(options, List.of("\"PING1\""), Collections.singletonList("\"PNING3\""));
} finally {
myCaptureQueriesListener.logSelectQueries();
}
}
@Test
public void testPatientBulkExportWithSingleId() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);
// create some resources
Patient patient = new Patient();
patient.setId("P1");
patient.setActive(true);
myClient.update().resource(patient).execute();
private void verifyBulkExportResults(String theGroupId, HashSet<String> theFilters, List<String> theContainedList, List<String> theExcludedList) {
Observation observation = new Observation();
observation.setSubject(new Reference().setReference("Patient/P1"));
observation.setStatus(Observation.ObservationStatus.PRELIMINARY);
String obsId = myClient.create().resource(observation).execute().getId().getIdPart();
Encounter encounter = new Encounter();
encounter.setSubject(new Reference().setReference("Patient/P1"));
encounter.setStatus(Encounter.EncounterStatus.INPROGRESS);
String encId = myClient.create().resource(encounter).execute().getId().getIdPart();
// diff patient
patient = new Patient();
patient.setId("P2");
patient.setActive(true);
myClient.update().resource(patient).execute();
observation = new Observation();
observation.setSubject(new Reference().setReference("Patient/P2"));
observation.setStatus(Observation.ObservationStatus.PRELIMINARY);
String obsId2 = myClient.create().resource(observation).execute().getId().getIdPart();
encounter = new Encounter();
encounter.setSubject(new Reference().setReference("Patient/P2"));
encounter.setStatus(Encounter.EncounterStatus.INPROGRESS);
String encId2 = myClient.create().resource(encounter).execute().getId().getIdPart();
observation = new Observation();
observation.setStatus(Observation.ObservationStatus.PRELIMINARY);
String obsId3 = myClient.create().resource(observation).execute().getId().getIdPart();
// set the export options
BulkDataExportOptions options = new BulkDataExportOptions();
options.setResourceTypes(Sets.newHashSet("Patient"));
options.setGroupId(new IdType("Group", theGroupId));
options.setFilters(theFilters);
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Encounter"));
options.setPatientIds(Sets.newHashSet(new IdType("Patient", "P1")));
options.setFilters(new HashSet<>());
options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
Batch2JobStartResponse startResponse = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
verifyBulkExportResults(options, List.of("\"P1\"", "\"" + obsId + "\"", "\"" + encId + "\""), List.of("\"P2\"", "\"" + obsId2 + "\"", "\"" + encId2 + "\"", "\"" + obsId3 + "\""));
}
@Test
public void testPatientBulkExportWithMultiIds() {
	// Missing-field indexing is enabled so the patient-reference filter can match resources
	myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);

	// create three patients, each with one Observation and one Encounter referencing them
	String[] p1Ids = createPatientWithObservationAndEncounter("P1");
	String[] p2Ids = createPatientWithObservationAndEncounter("P2");
	String[] p3Ids = createPatientWithObservationAndEncounter("P3");

	// set the export options: PATIENT-style export restricted to P1 and P2 (P3 excluded)
	BulkDataExportOptions options = new BulkDataExportOptions();
	options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Encounter"));
	options.setPatientIds(Sets.newHashSet(new IdType("Patient", "P1"), new IdType("Patient", "P2")));
	options.setFilters(new HashSet<>());
	options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
	options.setOutputFormat(Constants.CT_FHIR_NDJSON);

	verifyBulkExportResults(options,
		List.of("\"P1\"", "\"" + p1Ids[0] + "\"", "\"" + p1Ids[1] + "\"", "\"P2\"", "\"" + p2Ids[0] + "\"", "\"" + p2Ids[1] + "\""),
		List.of("\"P3\"", "\"" + p3Ids[0] + "\"", "\"" + p3Ids[1] + "\""));
}

/**
 * Creates an active Patient with the given id plus one Observation and one Encounter
 * whose subject references that patient. Extracted to remove the triplicated setup
 * previously inlined in this test.
 *
 * @param thePatientId the raw Patient id to create (e.g. "P1")
 * @return a two-element array: {observationId, encounterId}
 */
private String[] createPatientWithObservationAndEncounter(String thePatientId) {
	Patient patient = new Patient();
	patient.setId(thePatientId);
	patient.setActive(true);
	myClient.update().resource(patient).execute();

	Observation observation = new Observation();
	observation.setSubject(new Reference().setReference("Patient/" + thePatientId));
	observation.setStatus(Observation.ObservationStatus.PRELIMINARY);
	String obsId = myClient.create().resource(observation).execute().getId().getIdPart();

	Encounter encounter = new Encounter();
	encounter.setSubject(new Reference().setReference("Patient/" + thePatientId));
	encounter.setStatus(Encounter.EncounterStatus.INPROGRESS);
	String encId = myClient.create().resource(encounter).execute().getId().getIdPart();

	return new String[]{obsId, encId};
}
private void verifyBulkExportResults(BulkDataExportOptions theOptions, List<String> theContainedList, List<String> theExcludedList) {
Batch2JobStartResponse startResponse = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions));
assertNotNull(startResponse);
@ -141,22 +281,21 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
// Iterate over the files
String report = myJobRunner.getJobInfo(startResponse.getJobId()).getReport();
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
String contents = "";
for (Map.Entry<String, List<String>> file : results.getResourceTypeToBinaryIds().entrySet()) {
List<String> binaryIds = file.getValue();
assertEquals(1, binaryIds.size());
for (String binaryId : binaryIds) {
Binary binary = myBinaryDao.read(new IdType(binaryId));
assertEquals(Constants.CT_FHIR_NDJSON, binary.getContentType());
String contents = new String(binary.getContent(), Constants.CHARSET_UTF8);
ourLog.info("Next contents for type {} :\n{}", binary.getResourceType(), contents);
for (String containedString : theContainedList) {
assertThat(contents, Matchers.containsString(containedString));
Binary binary = myBinaryDao.read(new IdType(binaryIds.get(0)));
assertEquals(Constants.CT_FHIR_NDJSON, binary.getContentType());
contents += new String(binary.getContent(), Constants.CHARSET_UTF8) + "\n";
ourLog.info("Next contents for type {} :\n{}", binary.getResourceType(), new String(binary.getContent(), Constants.CHARSET_UTF8));
}
}
for (String excludedString : theExcludedList) {
assertThat(contents, not(Matchers.containsString(excludedString)));
}
}
for (String containedString : theContainedList) {
assertThat(contents, Matchers.containsString(containedString));
}
for (String excludedString : theExcludedList) {
assertThat(contents, not(Matchers.containsString(excludedString)));
}
}

View File

@ -43,6 +43,7 @@ public class BulkDataExportOptions {
private ExportStyle myExportStyle;
private boolean myExpandMdm;
private IIdType myGroupId;
private Set<IIdType> myPatientIds;
public void setOutputFormat(String theOutputFormat) {
myOutputFormat = theOutputFormat;
@ -99,4 +100,12 @@ public class BulkDataExportOptions {
public void setGroupId(IIdType theGroupId) {
myGroupId = theGroupId;
}
/**
 * @return the patient ids a patient-style export is restricted to, or {@code null}
 *         when no patient filter was supplied
 */
public Set<IIdType> getPatientIds() {
	return myPatientIds;
}

/**
 * Restricts a patient-style export to the given patient ids; {@code null} means no filtering.
 */
public void setPatientIds(Set<IIdType> thePatientIds) {
	myPatientIds = thePatientIds;
}
}

View File

@ -62,6 +62,7 @@ public class FetchResourceIdsStep implements IFirstJobStepWorker<BulkExportJobPa
providerParams.setStartDate(params.getStartDate());
providerParams.setExportStyle(params.getExportStyle());
providerParams.setGroupId(params.getGroupId());
providerParams.setPatientIds(params.getPatientIds());
providerParams.setExpandMdm(params.isExpandMdm());
int submissionCount = 0;

View File

@ -56,6 +56,9 @@ public class BulkExportJobParameters extends BulkExportJobBase {
@JsonProperty("exportStyle")
private BulkDataExportOptions.ExportStyle myExportStyle;
@JsonProperty("patientIds")
private List<String> myPatientIds;
// Stuff for group export only
/**
@ -107,6 +110,14 @@ public class BulkExportJobParameters extends BulkExportJobBase {
myExportStyle = theExportStyle;
}
/**
 * @return the patient ids this export job is restricted to, or {@code null} when
 *         no patient filter was supplied
 */
public List<String> getPatientIds() {
	return myPatientIds;
}

/**
 * Restricts this export job to the given patient ids; {@code null} means no filtering.
 */
public void setPatientIds(List<String> thePatientIds) {
	myPatientIds = thePatientIds;
}
public String getGroupId() {
return myGroupId;
}
@ -132,6 +143,7 @@ public class BulkExportJobParameters extends BulkExportJobBase {
params.setOutputFormat(theParameters.getOutputFormat());
params.setStartDate(theParameters.getStartDate());
params.setExpandMdm(theParameters.isExpandMdm());
params.setPatientIds(theParameters.getPatientIds());
return params;
}
}

View File

@ -70,6 +70,11 @@ public class BulkExportParameters extends Batch2BaseJobParameters {
*/
private boolean myExpandMdm;
/**
* Patient id(s)
*/
private List<String> myPatientIds;
public boolean isExpandMdm() {
return myExpandMdm;
}
@ -132,4 +137,12 @@ public class BulkExportParameters extends Batch2BaseJobParameters {
public void setOutputFormat(String theOutputFormat) {
myOutputFormat = theOutputFormat;
}
/**
 * @return the patient ids the export is restricted to, or {@code null} when no
 *         patient filter was supplied
 */
public List<String> getPatientIds() {
	return myPatientIds;
}

/**
 * Restricts the export to the given patient ids; {@code null} means no filtering.
 */
public void setPatientIds(List<String> thePatientIds) {
	myPatientIds = thePatientIds;
}
}

View File

@ -68,6 +68,11 @@ public class ExportPIDIteratorParameters {
*/
private boolean myExpandMdm;
/**
* The patient id(s) to restrict the export to; null when no patient filter was supplied
*/
private List<String> myPatientIds;
public String getResourceType() {
return myResourceType;
}
@ -124,6 +129,14 @@ public class ExportPIDIteratorParameters {
myExpandMdm = theExpandMdm;
}
/**
 * @return the patient ids the PID iteration is restricted to, or {@code null} when
 *         no patient filter was supplied
 */
public List<String> getPatientIds() {
	return myPatientIds;
}

/**
 * Restricts the PID iteration to the given patient ids; {@code null} means no filtering.
 */
public void setPatientIds(List<String> thePatientIds) {
	myPatientIds = thePatientIds;
}
@Override
public String toString() {
return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);

View File

@ -21,8 +21,6 @@ package ca.uhn.fhir.jpa.bulk.export.provider;
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
@ -67,6 +65,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
@ -169,7 +168,7 @@ public class BulkDataExportProvider {
}
/**
* Group/Id/$export
* Group/[id]/$export
*/
@Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Group")
public void groupExport(
@ -230,10 +229,30 @@ public class BulkDataExportProvider {
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theType,
@OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypeFilter,
@OperationParam(name = JpaConstants.PARAM_EXPORT_PATIENT, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> thePatient,
ServletRequestDetails theRequestDetails
) throws Exception {
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter);
BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, thePatient);
validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
startJob(theRequestDetails, bulkDataExportOptions);
}
/**
* Patient/[id]/$export
*/
@Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Patient")
public void patientInstanceExport(
@IdParam IIdType theIdParam,
@OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theOutputFormat,
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theType,
@OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypeFilter,
ServletRequestDetails theRequestDetails
) throws Exception {
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam);
validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
startJob(theRequestDetails, bulkDataExportOptions);
@ -364,8 +383,18 @@ public class BulkDataExportProvider {
return bulkDataExportOptions;
}
private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter) {
return buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.PATIENT);
/**
 * Builds PATIENT-style export options, optionally restricted to the patient ids
 * supplied via the "patient" operation parameter.
 *
 * @param thePatientIds the "patient" parameter values; may be null, meaning no restriction
 */
private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, List<IPrimitiveType<String>> thePatientIds) {
	BulkDataExportOptions options = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.PATIENT);
	if (thePatientIds != null) {
		HashSet<IIdType> ids = new HashSet<>();
		for (IPrimitiveType<String> patientId : thePatientIds) {
			ids.add(new IdType(patientId.getValueAsString()));
		}
		options.setPatientIds(ids);
	}
	return options;
}
/**
 * Builds PATIENT-style export options for an instance-level Patient/[id]/$export call,
 * restricted to the single patient id taken from the request path.
 */
private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IIdType thePatientId) {
	BulkDataExportOptions options = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.PATIENT);
	options.setPatientIds(Collections.singleton(thePatientId));
	return options;
}
private BulkDataExportOptions buildBulkDataExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, BulkDataExportOptions.ExportStyle theExportStyle) {

View File

@ -23,8 +23,10 @@ package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import org.hl7.fhir.instance.model.api.IIdType;
import java.util.ArrayList;
import java.util.stream.Collectors;
public class BulkExportUtils {
private BulkExportUtils() {}
@ -47,6 +49,9 @@ public class BulkExportUtils {
if (theOptions.getResourceTypes() != null) {
parameters.setResourceTypes(new ArrayList<>(theOptions.getResourceTypes()));
}
if (theOptions.getPatientIds() != null) {
parameters.setPatientIds(theOptions.getPatientIds().stream().map(IIdType::getValue).collect(Collectors.toList()));
}
parameters.setExpandMdm(theOptions.isExpandMdm());
parameters.setUseExistingJobsFirst(true);