Support `_exportId` for bulk exports. (#4781)

* Full implementation, test, changelogs

* Add changelogs

* Add default method

* Update hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java

Co-authored-by: James Agnew <jamesagnew@gmail.com>

* Code Review Comments

* Compilation failures

---------

Co-authored-by: James Agnew <jamesagnew@gmail.com>
Tadgh 2023-04-27 17:46:09 -04:00 committed by GitHub
parent f9de5c918e
commit 78b3b148ba
24 changed files with 400 additions and 25 deletions

@@ -1140,12 +1140,23 @@ public class FhirContext {
}
// TODO KHS add the other primitive types
@Deprecated(since = "6.6.0", forRemoval = true)
public IPrimitiveType<Boolean> getPrimitiveBoolean(Boolean theValue) {
return newPrimitiveBoolean(theValue);
}
public IPrimitiveType<Boolean> newPrimitiveBoolean(Boolean theValue) {
IPrimitiveType<Boolean> retval = (IPrimitiveType<Boolean>) getElementDefinition("boolean").newInstance();
retval.setValue(theValue);
return retval;
}
public IPrimitiveType<String> newPrimitiveString(String theValue) {
IPrimitiveType<String> retval = (IPrimitiveType<String>) getElementDefinition("string").newInstance();
retval.setValue(theValue);
return retval;
}
private static boolean tryToInitParser(Runnable run) {
boolean retVal;
try {

@@ -2693,6 +2693,33 @@ public enum Pointcut implements IPointcut {
"ca.uhn.fhir.jpa.util.SqlQueryList"
),
/**
* <b>Binary Blob Prefix Assigning Hook:</b>
* <p>
* Invoked immediately before a binary blob is stored to its eventual data sink, this hook
* allows implementers to provide a prefix for the binary blob's ID.
* This is helpful when you want to identify the blob for later retrieval outside of HAPI-FHIR.
* Note that the allowable characters will depend on the specific storage sink being used.
* <ul>
* <li>
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. Note that the bean
* properties are not all guaranteed to be populated.
* </li>
* <li>
* org.hl7.fhir.instance.model.api.IBaseResource - The binary resource that is about to be stored.
* </li>
* </ul>
* <p>
* Hooks should return <code>String</code>, which represents the full prefix to be applied to the blob.
* </p>
*/
STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX(String.class,
"ca.uhn.fhir.rest.api.server.RequestDetails",
"org.hl7.fhir.instance.model.api.IBaseResource"
),
/**
* This pointcut is used only for unit tests. Do not use in production code as it may be changed or
* removed at any time.

@@ -0,0 +1,5 @@
---
type: add
issue: 4774
title: "Bulk Export now supports a new `_exportId` parameter. If provided, any Binary resources generated by this export will have an extension in their `binary.meta` field which identifies this export. This can be used to correlate exported resources with the export job that generated them.
In addition, the `binary.meta` field of Bulk Export-generated binaries will also contain the job ID of the export job that generated them, as well as the resource type of the data contained within the binary."
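
For illustration, a minimal client-side sketch of the new parameter (hedged: `serverBase`, `ourHttpClient`, and the identifier `my-export-123` are placeholders; the URL shape and extension URLs come from the tests and constants in this commit):

// Kick off a system-level bulk export, tagging it with an export identifier
HttpGet kickOff = new HttpGet(serverBase + "/$export?_type=Patient&_exportId=my-export-123");
kickOff.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
try (CloseableHttpResponse response = ourHttpClient.execute(kickOff)) {
	// Expect 202 Accepted; poll the Content-Location header until the export completes
}

// Each Binary produced by the job then carries three meta extensions:
//   https://hapifhir.org/NamingSystem/bulk-export-identifier            -> "my-export-123"
//   https://hapifhir.org/NamingSystem/bulk-export-job-id                -> the batch2 job instance ID
//   https://hapifhir.org/NamingSystem/bulk-export-binary-resource-type  -> "Patient"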

@@ -0,0 +1,5 @@
---
type: add
issue: 4774
title: "A new Pointcut called `STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX` has been added. This pointcut is called when a binary blob is about to be stored,
and allows implementers to attach a prefix to the blob ID before it is stored."
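
As a sketch of how an implementer might use the new pointcut (the interceptor class name and the returned prefix are hypothetical; the hook signature mirrors the test interceptor later in this commit):

@Interceptor
public class BlobIdPrefixInterceptor {
	@Hook(Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX)
	public String provideBlobIdPrefix(RequestDetails theRequestDetails, IBaseResource theResource) {
		// The returned value is prepended to the blob ID that HAPI-FHIR generates.
		// The combined ID must still pass the storage service's isValidBlobId()
		// check, so avoid characters the sink forbids (the filesystem
		// implementation in this commit rejects \ / | .).
		return "tenant-a-";
	}
}

Registering it (for example via myInterceptorRegistry.registerInterceptor(new BlobIdPrefixInterceptor())) causes every newly externalized binary blob to be stored under the prefixed ID.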

@@ -471,7 +471,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
"Tag read/write failed: "
+ ex.getMessage() + ". "
+ "This is not a failure on its own, "
+ "but could be useful information in the result of an actual failure."
+ "but could be useful information in the result of an actual failure.", ex
);
throwables.add(ex);
}

@@ -162,6 +162,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
public static final String BASE_RESOURCE_NAME = "resource";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class);
@Autowired
protected IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
protected PlatformTransactionManager myPlatformTransactionManager;

@@ -13,6 +13,8 @@ import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
@@ -22,6 +24,9 @@ import ca.uhn.fhir.mdm.model.MdmPidTuple;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Group;
import org.hl7.fhir.r4.model.Observation;
@@ -131,6 +136,9 @@ public class JpaBulkExportProcessorTest {
@Mock
private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
@Spy
private IHapiTransactionService myTransactionService = new NonTransactionalHapiTransactionService();
@InjectMocks
private JpaBulkExportProcessor myProcessor;

@@ -196,6 +196,11 @@ public class JpaConstants {
* Parameter for the $export operation
*/
public static final String PARAM_EXPORT_TYPE_FILTER = "_typeFilter";
/**
* Parameter for the $export operation to identify binaries with a given identifier.
*/
public static final String PARAM_EXPORT_IDENTIFIER = "_exportId";
/**
* Parameter for the $export operation
*/
@@ -205,6 +210,8 @@
*/
public static final String PARAM_EXPORT_PATIENT = "patient";
/**
* Parameter for the $import operation
*/
@@ -288,6 +295,9 @@
* IPS Generation operation URL
*/
public static final String SUMMARY_OPERATION_URL = "http://hl7.org/fhir/uv/ips/OperationDefinition/summary";
public static final String BULK_META_EXTENSION_EXPORT_IDENTIFIER = "https://hapifhir.org/NamingSystem/bulk-export-identifier";
public static final String BULK_META_EXTENSION_JOB_ID = "https://hapifhir.org/NamingSystem/bulk-export-job-id";
public static final String BULK_META_EXTENSION_RESOURCE_TYPE = "https://hapifhir.org/NamingSystem/bulk-export-binary-resource-type";
/**
* Non-instantiable

@@ -13,6 +13,7 @@ import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.BulkExportUtils;
@@ -27,6 +28,7 @@ import ca.uhn.fhir.util.SearchParameterUtil;
import com.google.common.collect.Sets;
import org.apache.commons.io.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.Header;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
@@ -37,6 +39,7 @@ import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Coverage;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.Group;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.InstantType;
@@ -107,6 +110,49 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
@Nested
public class SpecConformanceTests {
@Test
public void testBulkExportJobsAreMetaTaggedWithJobIdAndExportId() throws IOException {
//Given a patient exists
Patient p = new Patient();
p.setId("Pat-1");
myClient.update().resource(p).execute();
//And Given we start a bulk export job with a specific export id
String pollingLocation = submitBulkExportForTypesWithExportId("im-an-export-identifier", "Patient");
String jobId = getJobIdFromPollingLocation(pollingLocation);
myBatch2JobHelper.awaitJobCompletion(jobId);
//Then: When the poll shows as complete, all attributes should be filled.
HttpGet statusGet = new HttpGet(pollingLocation);
String expectedOriginalUrl = myClient.getServerBase() + "/$export?_type=Patient&_exportId=im-an-export-identifier";
try (CloseableHttpResponse status = ourHttpClient.execute(statusGet)) {
assertEquals(200, status.getStatusLine().getStatusCode());
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
assertTrue(isNotBlank(responseContent), responseContent);
ourLog.info(responseContent);
BulkExportResponseJson result = JsonUtil.deserialize(responseContent, BulkExportResponseJson.class);
assertThat(result.getRequest(), is(equalTo(expectedOriginalUrl)));
assertThat(result.getOutput(), is(not(empty())));
String binary_url = result.getOutput().get(0).getUrl();
Binary binaryResource = myClient.read().resource(Binary.class).withUrl(binary_url).execute();
List<Extension> extension = binaryResource.getMeta().getExtension();
assertThat(extension, hasSize(3));
assertThat(extension.get(0).getUrl(), is(equalTo(JpaConstants.BULK_META_EXTENSION_EXPORT_IDENTIFIER)));
assertThat(extension.get(0).getValue().toString(), is(equalTo("im-an-export-identifier")));
assertThat(extension.get(1).getUrl(), is(equalTo(JpaConstants.BULK_META_EXTENSION_JOB_ID)));
assertThat(extension.get(1).getValue().toString(), is(equalTo(jobId)));
assertThat(extension.get(2).getUrl(), is(equalTo(JpaConstants.BULK_META_EXTENSION_RESOURCE_TYPE)));
assertThat(extension.get(2).getValue().toString(), is(equalTo("Patient")));
}
}
@Test
public void testBatchJobsAreOnlyReusedIfInProgress() throws IOException {
//Given a patient exists
@@ -115,7 +161,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
myClient.update().resource(p).execute();
//And Given we start a bulk export job
String pollingLocation = submitBulkExportForTypes("Patient");
String pollingLocation = submitBulkExportForTypesWithExportId("my-export-id-","Patient");
String jobId = getJobIdFromPollingLocation(pollingLocation);
myBatch2JobHelper.awaitJobCompletion(jobId);
@@ -291,8 +337,16 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
}
private String submitBulkExportForTypes(String... theTypes) throws IOException {
return submitBulkExportForTypesWithExportId(null, theTypes);
}
private String submitBulkExportForTypesWithExportId(String theExportId, String... theTypes) throws IOException {
String typeString = String.join(",", theTypes);
HttpGet httpGet = new HttpGet(myClient.getServerBase() + "/$export?_type=" + typeString);
String uri = myClient.getServerBase() + "/$export?_type=" + typeString;
if (!StringUtils.isBlank(theExportId)) {
uri += "&_exportId=" + theExportId;
}
HttpGet httpGet = new HttpGet(uri);
httpGet.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
String pollingLocation;
try (CloseableHttpResponse status = ourHttpClient.execute(httpGet)) {

@@ -1,6 +1,8 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
@@ -11,12 +13,16 @@ import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.rest.client.api.IClientInterceptor;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.HapiExtensions;
import org.hl7.fhir.instance.model.api.IBaseHasExtensions;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Binary;
import org.hl7.fhir.r4.model.DocumentReference;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@@ -25,6 +31,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.stream.Collectors;
import java.io.IOException;
import static org.hamcrest.CoreMatchers.is;
@@ -81,6 +89,42 @@ public class BinaryStorageInterceptorR4Test extends BaseResourceProviderR4Test {
myInterceptorRegistry.unregisterInterceptor(myBinaryStorageInterceptor);
}
class BinaryFilePrefixingInterceptor {
@Hook(Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX)
public String provideFilenameForBinary(RequestDetails theRequestDetails, IBaseResource theResource) {
ourLog.info("Received binary for prefixing!" + theResource.getIdElement());
String extensionValus = ((IBaseHasExtensions) theResource.getMeta()).getExtension().stream().map(ext -> ext.getValue().toString()).collect(Collectors.joining("-"));
return "prefix-" + extensionValus + "-";
}
}
@Test
public void testCreatingExternalizedBinaryTriggersPointcut() {
BinaryFilePrefixingInterceptor interceptor = new BinaryFilePrefixingInterceptor();
myInterceptorRegistry.registerInterceptor(interceptor);
// Create a resource with two metadata extensions on the binary
Binary binary = new Binary();
binary.setContentType("application/octet-stream");
Extension ext = binary.getMeta().addExtension();
ext.setUrl("http://foo");
ext.setValue(new StringType("bar"));
Extension ext2 = binary.getMeta().addExtension();
ext2.setUrl("http://foo2");
ext2.setValue(new StringType("bar2"));
binary.setData(SOME_BYTES);
DaoMethodOutcome outcome = myBinaryDao.create(binary, mySrd);
// Make sure it was externalized
IIdType id = outcome.getId().toUnqualifiedVersionless();
String encoded = myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.getResource());
ourLog.info("Encoded: {}", encoded);
assertThat(encoded, containsString(HapiExtensions.EXT_EXTERNALIZED_BINARY_ID));
assertThat(encoded, containsString("prefix-bar-bar2-"));
myInterceptorRegistry.unregisterInterceptor(interceptor);
}
@Test
public void testCreateAndRetrieveBinary_ServerAssignedId_ExternalizedBinary() {
@@ -101,7 +145,6 @@ public class BinaryStorageInterceptorR4Test extends BaseResourceProviderR4Test {
Binary output = myBinaryDao.read(id, mySrd);
assertEquals("application/octet-stream", output.getContentType());
assertArrayEquals(SOME_BYTES, output.getData());
}

@@ -536,6 +536,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
myStorageSettings.setSuppressUpdatesWithNoChange(new JpaStorageSettings().isSuppressUpdatesWithNoChange());
myStorageSettings.setAllowContainsSearches(new JpaStorageSettings().isAllowContainsSearches());
myStorageSettings.setAutoCreatePlaceholderReferenceTargets(new JpaStorageSettings().isAutoCreatePlaceholderReferenceTargets());
myStorageSettings.setTagStorageMode(new JpaStorageSettings().getTagStorageMode());
myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize());
myPagingProvider.setDefaultPageSize(BasePagingProvider.DEFAULT_DEFAULT_PAGE_SIZE);
myPagingProvider.setMaximumPageSize(BasePagingProvider.DEFAULT_MAX_PAGE_SIZE);

@@ -32,6 +32,7 @@ import java.util.Set;
// They don't seem to serve any distinct purpose so they should be collapsed into 1
public class BulkDataExportOptions {
public enum ExportStyle {
PATIENT,
GROUP,
@@ -48,6 +49,8 @@
private IIdType myGroupId;
private Set<IIdType> myPatientIds;
private String myExportIdentifier;
public void setOutputFormat(String theOutputFormat) {
myOutputFormat = theOutputFormat;
}
@@ -131,4 +134,12 @@
public void setPatientIds(Set<IIdType> thePatientIds) {
myPatientIds = thePatientIds;
}
public String getExportIdentifier() {
return myExportIdentifier;
}
public void setExportIdentifier(String theExportIdentifier) {
myExportIdentifier = theExportIdentifier;
}
}

@@ -22,12 +22,14 @@ package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.batch2.api.IJobParametersValidator;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import org.apache.commons.lang3.StringUtils;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.springframework.beans.factory.annotation.Autowired;
@@ -35,6 +37,8 @@ import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.apache.commons.lang3.StringUtils.isBlank;
@@ -48,6 +52,9 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator
@Autowired
private InMemoryResourceMatcher myInMemoryResourceMatcher;
@Autowired
private IBinaryStorageSvc myBinaryStorageSvc;
@Nullable
@Override
public List<String> validate(@Nonnull BulkExportJobParameters theParameters) {
@@ -69,6 +76,13 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator
if (!Constants.CT_FHIR_NDJSON.equalsIgnoreCase(theParameters.getOutputFormat())) {
errorMsgs.add("The only allowed format for Bulk Export is currently " + Constants.CT_FHIR_NDJSON);
}
// validate the exportId
if (!StringUtils.isBlank(theParameters.getExportIdentifier())) {
if (!myBinaryStorageSvc.isValidBlobId(theParameters.getExportIdentifier())) {
errorMsgs.add("Export ID does not conform to the current blob storage implementation's limitations.");
}
}
// validate for group
BulkDataExportOptions.ExportStyle style = theParameters.getExportStyle();
@@ -113,4 +127,5 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator
return errorMsgs;
}
}

@@ -33,10 +33,14 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.util.BinaryUtil;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseHasExtensions;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
@@ -73,6 +77,8 @@ public class WriteBinaryStep implements IJobStepWorker<BulkExportJobParameters,
IBaseBinary binary = BinaryUtil.newBinary(myFhirContext);
addMetadataExtensionsToBinary(theStepExecutionDetails, expandedResources, binary);
// TODO: should be dependent on the output format in parameters
@@ -122,6 +128,41 @@ public class WriteBinaryStep implements IJobStepWorker<BulkExportJobParameters,
return new RunOutcome(numResourcesProcessed);
}
/**
* Adds 3 extensions to the `binary.meta` element.
*
* 1. the _exportId provided at request time
* 2. the job ID of the job instance
* 3. the resource type of the resources contained in the binary
*/
private void addMetadataExtensionsToBinary(@Nonnull StepExecutionDetails<BulkExportJobParameters, ExpandedResourcesList> theStepExecutionDetails, ExpandedResourcesList expandedResources, IBaseBinary binary) {
// Note that this applies only to hl7.org structures, so these extensions will not be added
// to DSTU2 structures
if (binary.getMeta() instanceof IBaseHasExtensions) {
IBaseHasExtensions meta = (IBaseHasExtensions) binary.getMeta();
//export identifier, potentially null.
String exportIdentifier = theStepExecutionDetails.getParameters().getExportIdentifier();
if (!StringUtils.isBlank(exportIdentifier)) {
IBaseExtension<?, ?> exportIdentifierExtension = meta.addExtension();
exportIdentifierExtension.setUrl(JpaConstants.BULK_META_EXTENSION_EXPORT_IDENTIFIER);
exportIdentifierExtension.setValue(myFhirContext.newPrimitiveString(exportIdentifier));
}
//job id
IBaseExtension<?, ?> jobExtension = meta.addExtension();
jobExtension.setUrl(JpaConstants.BULK_META_EXTENSION_JOB_ID);
jobExtension.setValue(myFhirContext.newPrimitiveString(theStepExecutionDetails.getInstance().getInstanceId()));
//resource type
IBaseExtension<?, ?> typeExtension = meta.addExtension();
typeExtension.setUrl(JpaConstants.BULK_META_EXTENSION_RESOURCE_TYPE);
typeExtension.setValue(myFhirContext.newPrimitiveString(expandedResources.getResourceType()));
} else {
ourLog.warn("Could not attach metadata extensions to binary resource, as this binary metadata does not support extensions");
}
}
/**
* Returns an output stream writer
* (exposed for testing)

@@ -47,6 +47,9 @@ public class BulkExportJobParameters extends BulkExportJobBase {
@JsonProperty("since")
private Date myStartDate;
@JsonProperty("exportId")
private String myExportId;
@JsonProperty("filters")
private List<String> myFilters;
@@ -82,6 +85,7 @@ public class BulkExportJobParameters extends BulkExportJobBase {
BulkExportJobParameters params = new BulkExportJobParameters();
params.setResourceTypes(theParameters.getResourceTypes());
params.setExportStyle(theParameters.getExportStyle());
params.setExportIdentifier(theParameters.getExportIdentifier());
params.setFilters(theParameters.getFilters());
params.setPostFetchFilterUrls(theParameters.getPostFetchFilterUrls());
params.setGroupId(theParameters.getGroupId());
@@ -94,10 +98,18 @@
return params;
}
public String getExportIdentifier() {
return myExportId;
}
public List<String> getResourceTypes() {
return myResourceTypes;
}
public void setExportIdentifier(String theExportId) {
myExportId = theExportId;
}
public void setResourceTypes(List<String> theResourceTypes) {
myResourceTypes = theResourceTypes;
}

@@ -2,6 +2,7 @@ package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import org.junit.jupiter.api.Test;
@@ -18,6 +19,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
@@ -27,6 +29,9 @@ public class BulkExportJobParametersValidatorTest {
@Mock
private DaoRegistry myDaoRegistry;
@Mock
private IBinaryStorageSvc myIBinaryStorageSvc;
@InjectMocks
private BulkExportJobParametersValidator myValidator;
@@ -55,6 +60,38 @@
assertTrue(result.isEmpty());
}
@Test
public void validate_exportId_illegal_characters() {
BulkExportJobParameters parameters = createSystemExportParameters();
parameters.setExportIdentifier("exportId&&&");
// when
when(myDaoRegistry.isResourceTypeSupported(anyString()))
.thenReturn(true);
when(myIBinaryStorageSvc.isValidBlobId(any())).thenReturn(false);
List<String> errors = myValidator.validate(parameters);
// verify
assertNotNull(errors);
assertEquals(1, errors.size());
assertEquals("Export ID does not conform to the current blob storage implementation's limitations.", errors.get(0));
}
@Test
public void validate_exportId_legal_characters() {
BulkExportJobParameters parameters = createSystemExportParameters();
parameters.setExportIdentifier("HELLO!/WORLD/");
// when
when(myDaoRegistry.isResourceTypeSupported(anyString()))
.thenReturn(true);
when(myIBinaryStorageSvc.isValidBlobId(any())).thenReturn(true);
List<String> errors = myValidator.validate(parameters);
// verify
assertNotNull(errors);
assertEquals(0, errors.size());
}
@Test
public void validate_validParametersForPatient_returnsEmptyList() {
// setup

@@ -85,6 +85,9 @@ public class BulkExportParameters extends Batch2BaseJobParameters {
* The request which originated the request.
*/
private String myOriginalRequestUrl;
private String myExportIdentifier;
/**
* The partition for the request if applicable.
@@ -107,6 +110,13 @@
return myResourceTypes;
}
public void setExportIdentifier(String theExportIdentifier) {
myExportIdentifier = theExportIdentifier;
}
public String getExportIdentifier() {
return myExportIdentifier;
}
public void setResourceTypes(List<String> theResourceTypes) {
myResourceTypes = theResourceTypes;
}

@@ -36,6 +36,16 @@
long getMaximumBinarySize();
/**
* Given a blob ID, return true if it is valid for the underlying storage mechanism, false otherwise.
*
* @param theNewBlobId the blob ID to validate
* @return true if the blob ID is valid, false otherwise.
*/
default boolean isValidBlobId(String theNewBlobId) {
return true; // default method here as we don't want to break existing implementations
}
/**
* Sets the maximum number of bytes that can be stored in a single binary
* file by this service. The default is {@link Long#MAX_VALUE}
*

@@ -24,6 +24,8 @@ import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
@@ -31,18 +33,27 @@ import ca.uhn.fhir.jpa.binary.api.IBinaryTarget;
import ca.uhn.fhir.jpa.binary.api.StoredDetails;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.HapiExtensions;
import ca.uhn.fhir.util.IModelVisitor2;
import org.apache.commons.io.FileUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IBaseHasExtensions;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Request;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -71,6 +82,9 @@ public class BinaryStorageInterceptor<T extends IPrimitiveType<byte[]>> {
private final FhirContext myCtx;
@Autowired
private BinaryAccessProvider myBinaryAccessProvider;
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
private Class<T> myBinaryType;
private String myDeferredListKey;
private long myAutoInflateBinariesMaximumBytes = 10 * FileUtils.ONE_MB;
@@ -123,14 +137,14 @@
}
@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED)
public void extractLargeBinariesBeforeCreate(TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException {
extractLargeBinaries(theTransactionDetails, theResource, thePointcut);
public void extractLargeBinariesBeforeCreate(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException {
extractLargeBinaries(theRequestDetails, theTransactionDetails, theResource, thePointcut);
}
@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED)
public void extractLargeBinariesBeforeUpdate(TransactionDetails theTransactionDetails, IBaseResource thePreviousResource, IBaseResource theResource, Pointcut thePointcut) throws IOException {
public void extractLargeBinariesBeforeUpdate(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IBaseResource thePreviousResource, IBaseResource theResource, Pointcut thePointcut) throws IOException {
blockIllegalExternalBinaryIds(thePreviousResource, theResource);
extractLargeBinaries(theTransactionDetails, theResource, thePointcut);
extractLargeBinaries(theRequestDetails, theTransactionDetails, theResource, thePointcut);
}
/**
@@ -180,7 +194,7 @@
}
private void extractLargeBinaries(TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException {
private void extractLargeBinaries(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException {
IIdType resourceId = theResource.getIdElement();
if (!resourceId.hasResourceType() && resourceId.hasIdPart()) {
@@ -206,9 +220,18 @@
} else {
assert thePointcut == Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED : thePointcut.name();
newBlobId = myBinaryStorageSvc.newBlobId();
List<DeferredBinaryTarget> deferredBinaryTargets = getOrCreateDeferredBinaryStorageMap(theTransactionDetails);
DeferredBinaryTarget newDeferredBinaryTarget = new DeferredBinaryTarget(newBlobId, nextTarget, data);
deferredBinaryTargets.add(newDeferredBinaryTarget);
String prefix = invokeAssignBlobPrefix(theRequestDetails, theResource);
if (isNotBlank(prefix)) {
newBlobId = prefix + newBlobId;
}
if (myBinaryStorageSvc.isValidBlobId(newBlobId)) {
List<DeferredBinaryTarget> deferredBinaryTargets = getOrCreateDeferredBinaryStorageMap(theTransactionDetails);
DeferredBinaryTarget newDeferredBinaryTarget = new DeferredBinaryTarget(newBlobId, nextTarget, data);
deferredBinaryTargets.add(newDeferredBinaryTarget);
} else {
throw new InternalErrorException(Msg.code(2341) + "Invalid blob ID for backing storage service.[blobId=" + newBlobId + ",service=" + myBinaryStorageSvc.getClass().getName() + "]");
}
}
myBinaryAccessProvider.replaceDataWithExtension(nextTarget, newBlobId);
@@ -217,6 +240,21 @@
}
}
/**
* This invokes the {@link Pointcut#STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX} hook and returns the prefix to use for the blob ID, or null if there are no implementers.
* @return A string, which will be used to prefix the blob ID. May be null.
*/
private String invokeAssignBlobPrefix(RequestDetails theRequest, IBaseResource theResource) {
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, myInterceptorBroadcaster, theRequest)) {
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.add(IBaseResource.class, theResource);
return (String) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, params);
} else {
return null;
}
}
@Nonnull
private List<DeferredBinaryTarget> getOrCreateDeferredBinaryStorageMap(TransactionDetails theTransactionDetails) {
return theTransactionDetails.getOrCreateUserData(getDeferredListKey(), () -> new ArrayList<>());

@@ -94,6 +94,14 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc {
return b.toString();
}
/**
* Default implementation is to return true for any Blob ID.
*/
@Override
public boolean isValidBlobId(String theNewBlobId) {
return true;
}
@Override
public boolean shouldStoreBlob(long theSize, IIdType theResourceId, String theContentType) {
return theSize >= getMinimumBinarySize();

@@ -36,6 +36,11 @@ public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc {
return 0;
}
@Override
public boolean isValidBlobId(String theNewBlobId) {
return true;
}
@Override
public void setMaximumBinarySize(long theMaximumBinarySize) {
// ignore

@@ -32,6 +32,7 @@ import com.google.common.hash.HashingInputStream;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.CountingInputStream;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
@@ -50,6 +51,8 @@ import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
@@ -75,6 +78,14 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
mkdir(myBasePath);
}
/**
* This implementation rejects blob IDs containing any of: \ / | .
*/
@Override
public boolean isValidBlobId(String theNewBlobId) {
return !StringUtils.containsAny(theNewBlobId, '\\', '/', '|', '.');
}
@Override
public StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, InputStream theInputStream) throws IOException {
String id = super.provideIdForNewBlob(theBlobIdOrNull);

@@ -125,12 +125,13 @@ public class BulkDataExportProvider {
@OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypeFilter,
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypePostFetchFilterUrl,
@OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theExportId,
ServletRequestDetails theRequestDetails
) {
// JPA export provider
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
BulkDataExportOptions bulkDataExportOptions = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theTypePostFetchFilterUrl);
BulkDataExportOptions bulkDataExportOptions = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, theTypePostFetchFilterUrl);
startJob(theRequestDetails, bulkDataExportOptions);
}
@@ -199,6 +200,7 @@
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypeFilter,
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypePostFetchFilterUrl,
@OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") IPrimitiveType<Boolean> theMdm,
@OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theExportIdentifier,
ServletRequestDetails theRequestDetails
) {
ourLog.debug("Received Group Bulk Export Request for Group {}", theIdParam);
@ -209,7 +211,7 @@ public class BulkDataExportProvider {
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm, theTypePostFetchFilterUrl);
BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm, theExportIdentifier, theTypePostFetchFilterUrl);
if (isNotEmpty(bulkDataExportOptions.getResourceTypes())) {
validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
@@ -253,10 +255,11 @@
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypeFilter,
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypePostFetchFilterUrl,
@OperationParam(name = JpaConstants.PARAM_EXPORT_PATIENT, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> thePatient,
@OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theExportIdentifier,
ServletRequestDetails theRequestDetails
) {
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, thePatient, theTypePostFetchFilterUrl);
BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, thePatient, theTypePostFetchFilterUrl);
validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
startJob(theRequestDetails, bulkDataExportOptions);
@@ -273,10 +276,11 @@
@OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypeFilter,
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theTypePostFetchFilterUrl,
@OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theExportIdentifier,
ServletRequestDetails theRequestDetails
) {
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theTypePostFetchFilterUrl);
BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, theIdParam, theTypePostFetchFilterUrl);
validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
startJob(theRequestDetails, bulkDataExportOptions);
@@ -418,12 +422,12 @@
}
}
private BulkDataExportOptions buildSystemBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
return buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.SYSTEM, theTypePostFetchFilterUrl);
private BulkDataExportOptions buildSystemBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportId, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
return buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkDataExportOptions.ExportStyle.SYSTEM, theTypePostFetchFilterUrl);
}
private BulkDataExportOptions buildGroupBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IIdType theGroupId, IPrimitiveType<Boolean> theExpandMdm, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.GROUP, theTypePostFetchFilterUrl);
private BulkDataExportOptions buildGroupBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IIdType theGroupId, IPrimitiveType<Boolean> theExpandMdm, IPrimitiveType<String> theExportId, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkDataExportOptions.ExportStyle.GROUP, theTypePostFetchFilterUrl);
bulkDataExportOptions.setGroupId(theGroupId);
boolean mdm = false;
@@ -435,26 +439,26 @@
return bulkDataExportOptions;
}
private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, List<IPrimitiveType<String>> thePatientIds, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportIdentifier, List<IPrimitiveType<String>> thePatientIds, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
IPrimitiveType<String> type = theType;
if (type == null) {
// Type is optional, but the job requires it
type = new StringDt("Patient");
}
BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, type, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl);
BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, type, theSince, theTypeFilter, theExportIdentifier, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl);
if (thePatientIds != null) {
bulkDataExportOptions.setPatientIds(thePatientIds.stream().map((pid) -> new IdType(pid.getValueAsString())).collect(Collectors.toSet()));
}
return bulkDataExportOptions;
}
private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IIdType thePatientId, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl);
private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportIdentifier, IIdType thePatientId, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl);
bulkDataExportOptions.setPatientIds(Collections.singleton(thePatientId));
return bulkDataExportOptions;
}
private BulkDataExportOptions buildBulkDataExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, BulkDataExportOptions.ExportStyle theExportStyle, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
private BulkDataExportOptions buildBulkDataExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportIdentifier, BulkDataExportOptions.ExportStyle theExportStyle, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
String outputFormat = theOutputFormat != null ? theOutputFormat.getValueAsString() : Constants.CT_FHIR_NDJSON;
Set<String> resourceTypes = null;
@@ -466,6 +470,10 @@
if (theSince != null) {
since = theSince.getValue();
}
String exportIdentifier = null;
if (theExportIdentifier != null) {
exportIdentifier = theExportIdentifier.getValueAsString();
}
Set<String> typeFilters = splitTypeFilters(theTypeFilter);
Set<String> typePostFetchFilterUrls = splitTypeFilters(theTypePostFetchFilterUrl);
@@ -474,6 +482,7 @@
bulkDataExportOptions.setFilters(typeFilters);
bulkDataExportOptions.setPostFetchFilterUrls(typePostFetchFilterUrls);
bulkDataExportOptions.setExportStyle(theExportStyle);
bulkDataExportOptions.setExportIdentifier(exportIdentifier);
bulkDataExportOptions.setSince(since);
bulkDataExportOptions.setResourceTypes(resourceTypes);
bulkDataExportOptions.setOutputFormat(outputFormat);

@@ -53,6 +53,7 @@ public class BulkExportUtils {
}
parameters.setExpandMdm(theOptions.isExpandMdm());
parameters.setUseExistingJobsFirst(true);
parameters.setExportIdentifier(theOptions.getExportIdentifier());
return parameters;
}