diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java index 1792577edbc..6c3b541c46b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java @@ -1140,12 +1140,23 @@ public class FhirContext { } // TODO KHS add the other primitive types + @Deprecated(since = "6.6.0", forRemoval = true) public IPrimitiveType getPrimitiveBoolean(Boolean theValue) { + return newPrimitiveBoolean(theValue); + } + + public IPrimitiveType newPrimitiveBoolean(Boolean theValue) { IPrimitiveType retval = (IPrimitiveType) getElementDefinition("boolean").newInstance(); retval.setValue(theValue); return retval; } + public IPrimitiveType newPrimitiveString(String theValue) { + IPrimitiveType retval = (IPrimitiveType) getElementDefinition("string").newInstance(); + retval.setValue(theValue); + return retval; + } + private static boolean tryToInitParser(Runnable run) { boolean retVal; try { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java index 9ef7a8dc177..c96b49cbe72 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java @@ -2693,6 +2693,33 @@ public enum Pointcut implements IPointcut { "ca.uhn.fhir.jpa.util.SqlQueryList" ), + /** + * Binary Blob Prefix Assigning Hook: + *

+ * <p>
+ * Immediately before a binary blob is stored to its eventual data sink, this hook is called.
+ * This hook allows implementers to provide a prefix for the binary blob's ID.
+ * This is helpful in cases where you want to identify this blob for later retrieval outside of HAPI-FHIR. Note that allowable characters will depend on the specific storage sink being used.
+ * </p>
+ * <p>
+ * Hooks may accept the following parameters:
+ * </p>
+ * <ul>
+ * <li>
+ * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+ * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+ * pulled out of the servlet request. Note that the bean
+ * properties are not all guaranteed to be populated.
+ * </li>
+ * <li>
+ * org.hl7.fhir.instance.model.api.IBaseResource - The binary resource that is about to be stored.
+ * </li>
+ * </ul>
+ * <p>
+ * Hooks should return String, which represents the full prefix to be applied to the blob ID.
+ * </p>
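+ * <p>
+ * For illustration only, a hook implementation might look like the following sketch (the
+ * interceptor class and prefixing scheme here are hypothetical, not part of this change):
+ * </p>
+ * <pre>
+ * &#64;Interceptor
+ * public class TenantBlobPrefixInterceptor {
+ *    &#64;Hook(Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX)
+ *    public String assignBlobIdPrefix(RequestDetails theRequestDetails, IBaseResource theResource) {
+ *       // The returned value is prepended verbatim to the generated blob ID. The combined
+ *       // ID must still satisfy IBinaryStorageSvc#isValidBlobId for the active storage service.
+ *       return theRequestDetails.getTenantId() + "-";
+ *    }
+ * }
+ * </pre>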
+ */ + STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX(String.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "org.hl7.fhir.instance.model.api.IBaseResource" + ), + + /** * This pointcut is used only for unit tests. Do not use in production code as it may be changed or * removed at any time. diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4774-bulk-export-resource-metadata.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4774-bulk-export-resource-metadata.yaml new file mode 100644 index 00000000000..d3ddc3784d9 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4774-bulk-export-resource-metadata.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 4774 +title: "Bulk Export now supports a new `_exportId` parameter. If provided, any Binary resources generated by this export will have an extension in their `binary.meta` field which identifies this export. This can be used to correlate exported resources with the export job that generated them. +In addition, the `binary.meta` field of Bulk Export-generated binaries will also contain the job ID of the export job that generated them, as well as the resource type of the data contained within the binary." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4774-interceptor-for-prefixing.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4774-interceptor-for-prefixing.yaml new file mode 100644 index 00000000000..bb200f1a059 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4774-interceptor-for-prefixing.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 4774 +title: "A new Pointcut called `STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX` has been added. This pointcut is called when a binary blob is about to be stored, +and allows implementers to attach a prefix to the blob ID before it is stored." diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java index ed5e86aa464..aacfd954b83 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java @@ -471,7 +471,7 @@ public abstract class BaseHapiFhirDao extends BaseStora "Tag read/write failed: " + ex.getMessage() + ". " + "This is not a failure on its own, " - + "but could be useful information in the result of an actual failure." 
+ + "but could be useful information in the result of an actual failure.", ex ); throwables.add(ex); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java index 76ca1c8a392..49fb7a2c487 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java @@ -162,6 +162,8 @@ public abstract class BaseHapiFhirResourceDao extends B public static final String BASE_RESOURCE_NAME = "resource"; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class); + @Autowired + protected IInterceptorBroadcaster myInterceptorBroadcaster; @Autowired protected PlatformTransactionManager myPlatformTransactionManager; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessorTest.java index 99d8fba6036..b1c5c9ce73f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessorTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessorTest.java @@ -13,6 +13,8 @@ import ca.uhn.fhir.jpa.dao.IResultIterator; import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc; +import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; +import ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -22,6 +24,9 @@ import ca.uhn.fhir.mdm.model.MdmPidTuple; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions; +import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Group; import org.hl7.fhir.r4.model.Observation; @@ -131,6 +136,9 @@ public class JpaBulkExportProcessorTest { @Mock private MdmExpansionCacheSvc myMdmExpansionCacheSvc; + @Spy + private IHapiTransactionService myTransactionService = new NonTransactionalHapiTransactionService(); + @InjectMocks private JpaBulkExportProcessor myProcessor; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java index a1458fb46cb..ac3df5f5336 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java @@ -196,6 +196,11 @@ public class JpaConstants { * Parameter for the $export operation */ public static final String PARAM_EXPORT_TYPE_FILTER = "_typeFilter"; + + /** + * Parameter for the $export operation to identify binaries with a given identifier. 
+ */ + public static final String PARAM_EXPORT_IDENTIFIER = "_exportId"; /** * Parameter for the $export operation */ @@ -205,6 +210,8 @@ public class JpaConstants { */ public static final String PARAM_EXPORT_PATIENT = "patient"; + + /** * Parameter for the $import operation */ @@ -288,6 +295,9 @@ public class JpaConstants { * IPS Generation operation URL */ public static final String SUMMARY_OPERATION_URL = "http://hl7.org/fhir/uv/ips/OperationDefinition/summary"; + public static final String BULK_META_EXTENSION_EXPORT_IDENTIFIER = "https://hapifhir.org/NamingSystem/bulk-export-identifier"; + public static final String BULK_META_EXTENSION_JOB_ID = "https://hapifhir.org/NamingSystem/bulk-export-job-id"; + public static final String BULK_META_EXTENSION_RESOURCE_TYPE = "https://hapifhir.org/NamingSystem/bulk-export-binary-resource-type"; /** * Non-instantiable diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java index 85025651f4f..0ce4515d868 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkExportUseCaseTest.java @@ -13,6 +13,7 @@ import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository; import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository; import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity; import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity; +import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.util.BulkExportUtils; @@ -27,6 +28,7 @@ import ca.uhn.fhir.util.SearchParameterUtil; import com.google.common.collect.Sets; import org.apache.commons.io.Charsets; import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.http.Header; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; @@ -37,6 +39,7 @@ import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.Coverage; import org.hl7.fhir.r4.model.Encounter; import org.hl7.fhir.r4.model.Enumerations; +import org.hl7.fhir.r4.model.Extension; import org.hl7.fhir.r4.model.Group; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.InstantType; @@ -107,6 +110,49 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test { @Nested public class SpecConformanceTests { + + @Test + public void testBulkExportJobsAreMetaTaggedWithJobIdAndExportId() throws IOException { + //Given a patient exists + Patient p = new Patient(); + p.setId("Pat-1"); + myClient.update().resource(p).execute(); + + //And Given we start a bulk export job with a specific export id + String pollingLocation = submitBulkExportForTypesWithExportId("im-an-export-identifier", "Patient"); + String jobId = getJobIdFromPollingLocation(pollingLocation); + myBatch2JobHelper.awaitJobCompletion(jobId); + + //Then: When the poll shows as complete, all attributes should be filled. 
+ HttpGet statusGet = new HttpGet(pollingLocation); + String expectedOriginalUrl = myClient.getServerBase() + "/$export?_type=Patient&_exportId=im-an-export-identifier"; + try (CloseableHttpResponse status = ourHttpClient.execute(statusGet)) { + assertEquals(200, status.getStatusLine().getStatusCode()); + String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8); + assertTrue(isNotBlank(responseContent), responseContent); + + ourLog.info(responseContent); + + BulkExportResponseJson result = JsonUtil.deserialize(responseContent, BulkExportResponseJson.class); + assertThat(result.getRequest(), is(equalTo(expectedOriginalUrl))); + assertThat(result.getOutput(), is(not(empty()))); + String binaryUrl = result.getOutput().get(0).getUrl(); + Binary binaryResource = myClient.read().resource(Binary.class).withUrl(binaryUrl).execute(); + + List<Extension> extension = binaryResource.getMeta().getExtension(); + assertThat(extension, hasSize(3)); + + assertThat(extension.get(0).getUrl(), is(equalTo(JpaConstants.BULK_META_EXTENSION_EXPORT_IDENTIFIER))); + assertThat(extension.get(0).getValue().toString(), is(equalTo("im-an-export-identifier"))); + + assertThat(extension.get(1).getUrl(), is(equalTo(JpaConstants.BULK_META_EXTENSION_JOB_ID))); + assertThat(extension.get(1).getValue().toString(), is(equalTo(jobId))); + + assertThat(extension.get(2).getUrl(), is(equalTo(JpaConstants.BULK_META_EXTENSION_RESOURCE_TYPE))); + assertThat(extension.get(2).getValue().toString(), is(equalTo("Patient"))); + } + } + @Test public void testBatchJobsAreOnlyReusedIfInProgress() throws IOException { //Given a patient exists @@ -115,7 +161,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test { myClient.update().resource(p).execute(); //And Given we start a bulk export job - String pollingLocation = submitBulkExportForTypes("Patient"); + String pollingLocation = submitBulkExportForTypesWithExportId("my-export-id-", "Patient"); String jobId = getJobIdFromPollingLocation(pollingLocation); myBatch2JobHelper.awaitJobCompletion(jobId); @@ -291,8 +337,16 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test { } private String submitBulkExportForTypes(String... theTypes) throws IOException { + return submitBulkExportForTypesWithExportId(null, theTypes); + } + private String submitBulkExportForTypesWithExportId(String theExportId, String...
theTypes) throws IOException { String typeString = String.join(",", theTypes); - HttpGet httpGet = new HttpGet(myClient.getServerBase() + "/$export?_type=" + typeString); + String uri = myClient.getServerBase() + "/$export?_type=" + typeString; + if (!StringUtils.isBlank(theExportId)) { + uri += "&_exportId=" + theExportId; + } + + HttpGet httpGet = new HttpGet(uri); httpGet.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); String pollingLocation; try (CloseableHttpResponse status = ourHttpClient.execute(httpGet)) { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/BinaryStorageInterceptorR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/BinaryStorageInterceptorR4Test.java index 1138c0f0d23..8e87cf32701 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/BinaryStorageInterceptorR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/BinaryStorageInterceptorR4Test.java @@ -1,6 +1,8 @@ package ca.uhn.fhir.jpa.provider.r4; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.interceptor.api.Hook; +import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc; @@ -11,12 +13,16 @@ import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test; import ca.uhn.fhir.rest.client.api.IClientInterceptor; import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.api.IHttpResponse; +import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.util.HapiExtensions; +import org.hl7.fhir.instance.model.api.IBaseHasExtensions; +import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Binary; import org.hl7.fhir.r4.model.DocumentReference; import org.hl7.fhir.r4.model.Enumerations; +import org.hl7.fhir.r4.model.Extension; import org.hl7.fhir.r4.model.StringType; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -25,6 +31,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import java.util.stream.Collectors; + import java.io.IOException; import static org.hamcrest.CoreMatchers.is; @@ -81,6 +89,42 @@ public class BinaryStorageInterceptorR4Test extends BaseResourceProviderR4Test { myInterceptorRegistry.unregisterInterceptor(myBinaryStorageInterceptor); } + class BinaryFilePrefixingInterceptor{ + + @Hook(Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX) + public String provideFilenameForBinary(RequestDetails theRequestDetails, IBaseResource theResource) { + ourLog.info("Received binary for prefixing!" 
+ theResource.getIdElement()); + String extensionValues = ((IBaseHasExtensions) theResource.getMeta()).getExtension().stream().map(ext -> ext.getValue().toString()).collect(Collectors.joining("-")); + return "prefix-" + extensionValues + "-"; + } + } + @Test + public void testCreatingExternalizedBinaryTriggersPointcut() { + BinaryFilePrefixingInterceptor interceptor = new BinaryFilePrefixingInterceptor(); + myInterceptorRegistry.registerInterceptor(interceptor); + // Create a resource with two metadata extensions on the binary + Binary binary = new Binary(); + binary.setContentType("application/octet-stream"); + Extension ext = binary.getMeta().addExtension(); + ext.setUrl("http://foo"); + ext.setValue(new StringType("bar")); + + Extension ext2 = binary.getMeta().addExtension(); + ext2.setUrl("http://foo2"); + ext2.setValue(new StringType("bar2")); + + binary.setData(SOME_BYTES); + DaoMethodOutcome outcome = myBinaryDao.create(binary, mySrd); + + // Make sure it was externalized + IIdType id = outcome.getId().toUnqualifiedVersionless(); + String encoded = myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.getResource()); + ourLog.info("Encoded: {}", encoded); + assertThat(encoded, containsString(HapiExtensions.EXT_EXTERNALIZED_BINARY_ID)); + assertThat(encoded, containsString("prefix-bar-bar2-")); + myInterceptorRegistry.unregisterInterceptor(interceptor); + } + @Test public void testCreateAndRetrieveBinary_ServerAssignedId_ExternalizedBinary() { @@ -101,7 +145,6 @@ public class BinaryStorageInterceptorR4Test extends BaseResourceProviderR4Test { Binary output = myBinaryDao.read(id, mySrd); assertEquals("application/octet-stream", output.getContentType()); assertArrayEquals(SOME_BYTES, output.getData()); - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java index 1a067904d84..68e5cf7d96a 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java @@ -536,6 +536,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil myStorageSettings.setSuppressUpdatesWithNoChange(new JpaStorageSettings().isSuppressUpdatesWithNoChange()); myStorageSettings.setAllowContainsSearches(new JpaStorageSettings().isAllowContainsSearches()); myStorageSettings.setAutoCreatePlaceholderReferenceTargets(new JpaStorageSettings().isAutoCreatePlaceholderReferenceTargets()); + myStorageSettings.setTagStorageMode(new JpaStorageSettings().getTagStorageMode()); + myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize()); myPagingProvider.setDefaultPageSize(BasePagingProvider.DEFAULT_DEFAULT_PAGE_SIZE); myPagingProvider.setMaximumPageSize(BasePagingProvider.DEFAULT_MAX_PAGE_SIZE); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkDataExportOptions.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkDataExportOptions.java index 3b8995426c5..eb609f07d02 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkDataExportOptions.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkDataExportOptions.java @@ -32,6 +32,7 @@ import java.util.Set; // They don't seem to serve any distinct purpose so they should be collapsed into 1 public class
BulkDataExportOptions { + public enum ExportStyle { PATIENT, GROUP, @@ -48,6 +49,8 @@ public class BulkDataExportOptions { private IIdType myGroupId; private Set myPatientIds; + private String myExportIdentifier; + public void setOutputFormat(String theOutputFormat) { myOutputFormat = theOutputFormat; } @@ -131,4 +134,12 @@ public class BulkDataExportOptions { public void setPatientIds(Set thePatientIds) { myPatientIds = thePatientIds; } + + public String getExportIdentifier() { + return myExportIdentifier; + } + + public void setExportIdentifier(String theExportIdentifier) { + myExportIdentifier = theExportIdentifier; + } } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java index 45d4160d69b..ab1e14f6ce9 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java @@ -22,12 +22,14 @@ package ca.uhn.fhir.batch2.jobs.export; import ca.uhn.fhir.batch2.api.IJobParametersValidator; import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc; import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions; +import org.apache.commons.lang3.StringUtils; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.springframework.beans.factory.annotation.Autowired; @@ -35,6 +37,8 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -48,6 +52,9 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator @Autowired private InMemoryResourceMatcher myInMemoryResourceMatcher; + @Autowired + private IBinaryStorageSvc myBinaryStorageSvc; + @Nullable @Override public List validate(@Nonnull BulkExportJobParameters theParameters) { @@ -69,6 +76,13 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator if (!Constants.CT_FHIR_NDJSON.equalsIgnoreCase(theParameters.getOutputFormat())) { errorMsgs.add("The only allowed format for Bulk Export is currently " + Constants.CT_FHIR_NDJSON); } + // validate the exportId + if (!StringUtils.isBlank(theParameters.getExportIdentifier())) { + + if (!myBinaryStorageSvc.isValidBlobId(theParameters.getExportIdentifier())) { + errorMsgs.add("Export ID does not conform to the current blob storage implementation's limitations."); + } + } // validate for group BulkDataExportOptions.ExportStyle style = theParameters.getExportStyle(); @@ -113,4 +127,5 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator return errorMsgs; } + } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java index 
02deacb436c..bc7f85088c2 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java @@ -33,10 +33,14 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; +import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.util.BinaryUtil; +import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.instance.model.api.IBaseBinary; +import org.hl7.fhir.instance.model.api.IBaseExtension; +import org.hl7.fhir.instance.model.api.IBaseHasExtensions; import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; @@ -73,6 +77,8 @@ public class WriteBinaryStep implements IJobStepWorker theStepExecutionDetails, ExpandedResourcesList expandedResources, IBaseBinary binary) { + // Note that this applies only to hl7.org structures, so these extensions will not be added + // to DSTU2 structures + if (binary.getMeta() instanceof IBaseHasExtensions) { + IBaseHasExtensions meta = (IBaseHasExtensions) binary.getMeta(); + + //export identifier, potentially null. + String exportIdentifier = theStepExecutionDetails.getParameters().getExportIdentifier(); + if (!StringUtils.isBlank(exportIdentifier)) { + IBaseExtension exportIdentifierExtension = meta.addExtension(); + exportIdentifierExtension.setUrl(JpaConstants.BULK_META_EXTENSION_EXPORT_IDENTIFIER); + exportIdentifierExtension.setValue(myFhirContext.newPrimitiveString(exportIdentifier)); + } + + //job id + IBaseExtension jobExtension = meta.addExtension(); + jobExtension.setUrl(JpaConstants.BULK_META_EXTENSION_JOB_ID); + jobExtension.setValue(myFhirContext.newPrimitiveString(theStepExecutionDetails.getInstance().getInstanceId())); + + //resource type + IBaseExtension typeExtension = meta.addExtension(); + typeExtension.setUrl(JpaConstants.BULK_META_EXTENSION_RESOURCE_TYPE); + typeExtension.setValue(myFhirContext.newPrimitiveString(expandedResources.getResourceType())); + } else { + ourLog.warn("Could not attach metadata extensions to binary resource, as this binary metadata does not support extensions"); + } + } + /** * Returns an output stream writer * (exposed for testing) diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobParameters.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobParameters.java index c89779283dd..e11830678fb 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobParameters.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobParameters.java @@ -47,6 +47,9 @@ public class BulkExportJobParameters extends BulkExportJobBase { @JsonProperty("since") private Date myStartDate; + @JsonProperty("exportId") + private String myExportId; + @JsonProperty("filters") private List myFilters; @@ -82,6 +85,7 @@ public class BulkExportJobParameters extends BulkExportJobBase { BulkExportJobParameters params = new BulkExportJobParameters(); params.setResourceTypes(theParameters.getResourceTypes()); params.setExportStyle(theParameters.getExportStyle()); + 
params.setExportIdentifier(theParameters.getExportIdentifier()); params.setFilters(theParameters.getFilters()); params.setPostFetchFilterUrls(theParameters.getPostFetchFilterUrls()); params.setGroupId(theParameters.getGroupId()); @@ -94,10 +98,18 @@ public class BulkExportJobParameters extends BulkExportJobBase { return params; } + public String getExportIdentifier() { + return myExportId; + } + public List getResourceTypes() { return myResourceTypes; } + public void setExportIdentifier(String theExportId) { + myExportId = theExportId; + } + public void setResourceTypes(List theResourceTypes) { myResourceTypes = theResourceTypes; } diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java index 9da414afa05..f2bd5482de3 100644 --- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java +++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidatorTest.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.batch2.jobs.export; import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions; import org.junit.jupiter.api.Test; @@ -18,6 +19,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.when; @@ -27,6 +29,9 @@ public class BulkExportJobParametersValidatorTest { @Mock private DaoRegistry myDaoRegistry; + @Mock + private IBinaryStorageSvc myIBinaryStorageSvc; + @InjectMocks private BulkExportJobParametersValidator myValidator; @@ -55,6 +60,38 @@ public class BulkExportJobParametersValidatorTest { assertTrue(result.isEmpty()); } + + @Test + public void validate_exportId_illegal_characters() { + BulkExportJobParameters parameters = createSystemExportParameters(); + parameters.setExportIdentifier("exportId&&&"); + // when + when(myDaoRegistry.isResourceTypeSupported(anyString())) + .thenReturn(true); + when(myIBinaryStorageSvc.isValidBlobId(any())).thenReturn(false); + List errors = myValidator.validate(parameters); + + // verify + assertNotNull(errors); + assertEquals(1, errors.size()); + assertEquals(errors.get(0), "Export ID does not conform to the current blob storage implementation's limitations."); + } + + @Test + public void validate_exportId_legal_characters() { + BulkExportJobParameters parameters = createSystemExportParameters(); + parameters.setExportIdentifier("HELLO!/WORLD/"); + // when + when(myDaoRegistry.isResourceTypeSupported(anyString())) + .thenReturn(true); + + when(myIBinaryStorageSvc.isValidBlobId(any())).thenReturn(true); + List errors = myValidator.validate(parameters); + + // verify + assertNotNull(errors); + assertEquals(0, errors.size()); + } @Test public void validate_validParametersForPatient_returnsEmptyList() { // setup diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportParameters.java 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportParameters.java index 6151c2340ee..2bf8d46f0ba 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportParameters.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportParameters.java @@ -85,6 +85,9 @@ public class BulkExportParameters extends Batch2BaseJobParameters { * The request which originated the request. */ private String myOriginalRequestUrl; + private String myExportIdentifier; + + /** * The partition for the request if applicable. @@ -107,6 +110,13 @@ public class BulkExportParameters extends Batch2BaseJobParameters { return myResourceTypes; } + public void setExportIdentifier(String theExportIdentifier) { + myExportIdentifier = theExportIdentifier; + } + public String getExportIdentifier() { + return myExportIdentifier; + } + public void setResourceTypes(List theResourceTypes) { myResourceTypes = theResourceTypes; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java index 312735cec7b..047cc29f99f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java @@ -36,6 +36,16 @@ public interface IBinaryStorageSvc { long getMaximumBinarySize(); /** + * Given a blob ID, return true if it is valid for the underlying storage mechanism, false otherwise. + * + * @param theNewBlobId the blob ID to validate + * @return true if the blob ID is valid, false otherwise. + */ + default boolean isValidBlobId(String theNewBlobId) { + return true;//default method here as we don't want to break existing implementations + } + + /** * Sets the maximum number of bytes that can be stored in a single binary * file by this service. 
The default is {@link Long#MAX_VALUE} * diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java index 95c56bae310..6129e5bd7fd 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java @@ -24,6 +24,8 @@ import ca.uhn.fhir.context.BaseRuntimeElementDefinition; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.Hook; +import ca.uhn.fhir.interceptor.api.HookParams; +import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc; @@ -31,18 +33,27 @@ import ca.uhn.fhir.jpa.binary.api.IBinaryTarget; import ca.uhn.fhir.jpa.binary.api.StoredDetails; import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider; import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; +import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import ca.uhn.fhir.util.HapiExtensions; import ca.uhn.fhir.util.IModelVisitor2; import org.apache.commons.io.FileUtils; import org.hl7.fhir.instance.model.api.IBase; +import org.hl7.fhir.instance.model.api.IBaseBinary; import org.hl7.fhir.instance.model.api.IBaseHasExtensions; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.Request; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -71,6 +82,9 @@ public class BinaryStorageInterceptor> { private final FhirContext myCtx; @Autowired private BinaryAccessProvider myBinaryAccessProvider; + + @Autowired + private IInterceptorBroadcaster myInterceptorBroadcaster; private Class myBinaryType; private String myDeferredListKey; private long myAutoInflateBinariesMaximumBytes = 10 * FileUtils.ONE_MB; @@ -123,14 +137,14 @@ public class BinaryStorageInterceptor> { } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED) - public void extractLargeBinariesBeforeCreate(TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException { - extractLargeBinaries(theTransactionDetails, theResource, thePointcut); + public void extractLargeBinariesBeforeCreate(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException { + extractLargeBinaries(theRequestDetails, theTransactionDetails, theResource, thePointcut); } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) - public void extractLargeBinariesBeforeUpdate(TransactionDetails 
theTransactionDetails, IBaseResource thePreviousResource, IBaseResource theResource, Pointcut thePointcut) throws IOException { + public void extractLargeBinariesBeforeUpdate(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IBaseResource thePreviousResource, IBaseResource theResource, Pointcut thePointcut) throws IOException { blockIllegalExternalBinaryIds(thePreviousResource, theResource); - extractLargeBinaries(theTransactionDetails, theResource, thePointcut); + extractLargeBinaries(theRequestDetails, theTransactionDetails, theResource, thePointcut); } /** @@ -180,7 +194,7 @@ public class BinaryStorageInterceptor> { } - private void extractLargeBinaries(TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException { + private void extractLargeBinaries(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException { IIdType resourceId = theResource.getIdElement(); if (!resourceId.hasResourceType() && resourceId.hasIdPart()) { @@ -206,9 +220,18 @@ public class BinaryStorageInterceptor> { } else { assert thePointcut == Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED : thePointcut.name(); newBlobId = myBinaryStorageSvc.newBlobId(); - List deferredBinaryTargets = getOrCreateDeferredBinaryStorageMap(theTransactionDetails); - DeferredBinaryTarget newDeferredBinaryTarget = new DeferredBinaryTarget(newBlobId, nextTarget, data); - deferredBinaryTargets.add(newDeferredBinaryTarget); + + String prefix = invokeAssignBlobPrefix(theRequestDetails, theResource); + if (isNotBlank(prefix)) { + newBlobId = prefix + newBlobId; + } + if (myBinaryStorageSvc.isValidBlobId(newBlobId)) { + List deferredBinaryTargets = getOrCreateDeferredBinaryStorageMap(theTransactionDetails); + DeferredBinaryTarget newDeferredBinaryTarget = new DeferredBinaryTarget(newBlobId, nextTarget, data); + deferredBinaryTargets.add(newDeferredBinaryTarget); + } else { + throw new InternalErrorException(Msg.code(2341) + "Invalid blob ID for backing storage service.[blobId=" + newBlobId + ",service=" + myBinaryStorageSvc.getClass().getName() +"]"); + } } myBinaryAccessProvider.replaceDataWithExtension(nextTarget, newBlobId); @@ -217,6 +240,21 @@ public class BinaryStorageInterceptor> { } } + /** + * This invokes the {@link Pointcut#STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX} hook and returns the prefix to use for the blob ID, or null if there are no implementers. + * @return A string, which will be used to prefix the blob ID. May be null. 
+ */ + private String invokeAssignBlobPrefix(RequestDetails theRequest, IBaseResource theResource) { + if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, myInterceptorBroadcaster, theRequest)) { + HookParams params = new HookParams() + .add(RequestDetails.class, theRequest) + .add(IBaseResource.class, theResource); + return (String) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, params); + } else { + return null; + } + } + @Nonnull private List getOrCreateDeferredBinaryStorageMap(TransactionDetails theTransactionDetails) { return theTransactionDetails.getOrCreateUserData(getDeferredListKey(), () -> new ArrayList<>()); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java index e7415adcda3..e2f8ca17915 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java @@ -94,6 +94,14 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc { return b.toString(); } + /** + * Default implementation is to return true for any Blob ID. + */ + @Override + public boolean isValidBlobId(String theNewBlobId) { + return true; + } + @Override public boolean shouldStoreBlob(long theSize, IIdType theResourceId, String theContentType) { return theSize >= getMinimumBinarySize(); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java index 3d90c9969db..1cf73711cc8 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java @@ -36,6 +36,11 @@ public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc { return 0; } + @Override + public boolean isValidBlobId(String theNewBlobId) { + return true; + } + @Override public void setMaximumBinarySize(long theMaximumBinarySize) { // ignore diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java index ca8d45adc8b..790b0a533cc 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java @@ -32,6 +32,7 @@ import com.google.common.hash.HashingInputStream; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.CountingInputStream; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; @@ -50,6 +51,8 @@ import java.io.InputStreamReader; import java.io.OutputStream; import java.io.Reader; import java.util.Date; +import java.util.regex.Matcher; +import java.util.regex.Pattern; public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { @@ -75,6 +78,14 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { mkdir(myBasePath); } + /** + * This implementation prevents: \ / | . 
+ */ + @Override + public boolean isValidBlobId(String theNewBlobId) { + return !StringUtils.containsAny(theNewBlobId, '\\', '/', '|', '.'); + + } @Override public StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, InputStream theInputStream) throws IOException { String id = super.provideIdForNewBlob(theBlobIdOrNull); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java index 33c05efecb8..785734f8cf4 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/provider/BulkDataExportProvider.java @@ -125,12 +125,13 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypeFilter, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypePostFetchFilterUrl, + @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType theExportId, ServletRequestDetails theRequestDetails ) { // JPA export provider validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT); - BulkDataExportOptions bulkDataExportOptions = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theTypePostFetchFilterUrl); + BulkDataExportOptions bulkDataExportOptions = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, theTypePostFetchFilterUrl); startJob(theRequestDetails, bulkDataExportOptions); } @@ -199,6 +200,7 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypeFilter, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypePostFetchFilterUrl, @OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") IPrimitiveType theMdm, + @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType theExportIdentifier, ServletRequestDetails theRequestDetails ) { ourLog.debug("Received Group Bulk Export Request for Group {}", theIdParam); @@ -209,7 +211,7 @@ public class BulkDataExportProvider { validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT); - BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm, theTypePostFetchFilterUrl); + BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm, theExportIdentifier, theTypePostFetchFilterUrl); if (isNotEmpty(bulkDataExportOptions.getResourceTypes())) { validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes()); @@ -253,10 +255,11 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 
OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypeFilter, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypePostFetchFilterUrl, @OperationParam(name = JpaConstants.PARAM_EXPORT_PATIENT, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> thePatient, + @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType theExportIdentifier, ServletRequestDetails theRequestDetails ) { validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT); - BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, thePatient, theTypePostFetchFilterUrl); + BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, thePatient, theTypePostFetchFilterUrl); validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes()); startJob(theRequestDetails, bulkDataExportOptions); @@ -273,10 +276,11 @@ public class BulkDataExportProvider { @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypeFilter, @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypePostFetchFilterUrl, + @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType theExportIdentifier, ServletRequestDetails theRequestDetails ) { validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT); - BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theTypePostFetchFilterUrl); + BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, theIdParam, theTypePostFetchFilterUrl); validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes()); startJob(theRequestDetails, bulkDataExportOptions); @@ -418,12 +422,12 @@ public class BulkDataExportProvider { } } - private BulkDataExportOptions buildSystemBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, List> theTypePostFetchFilterUrl) { - return buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.SYSTEM, theTypePostFetchFilterUrl); + private BulkDataExportOptions buildSystemBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportId, List> theTypePostFetchFilterUrl) { + return buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkDataExportOptions.ExportStyle.SYSTEM, theTypePostFetchFilterUrl); } - private BulkDataExportOptions buildGroupBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IIdType theGroupId, IPrimitiveType theExpandMdm, List> theTypePostFetchFilterUrl) { - BulkDataExportOptions bulkDataExportOptions = 
buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.GROUP, theTypePostFetchFilterUrl); + private BulkDataExportOptions buildGroupBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IIdType theGroupId, IPrimitiveType theExpandMdm, IPrimitiveType theExportId, List> theTypePostFetchFilterUrl) { + BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkDataExportOptions.ExportStyle.GROUP, theTypePostFetchFilterUrl); bulkDataExportOptions.setGroupId(theGroupId); boolean mdm = false; @@ -435,26 +439,26 @@ public class BulkDataExportProvider { return bulkDataExportOptions; } - private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, List> thePatientIds, List> theTypePostFetchFilterUrl) { + private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, List> thePatientIds, List> theTypePostFetchFilterUrl) { IPrimitiveType type = theType; if (type == null) { // Type is optional, but the job requires it type = new StringDt("Patient"); } - BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, type, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl); + BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, type, theSince, theTypeFilter, theExportIdentifier, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl); if (thePatientIds != null) { bulkDataExportOptions.setPatientIds(thePatientIds.stream().map((pid) -> new IdType(pid.getValueAsString())).collect(Collectors.toSet())); } return bulkDataExportOptions; } - private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IIdType thePatientId, List> theTypePostFetchFilterUrl) { - BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl); + private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, IIdType thePatientId, List> theTypePostFetchFilterUrl) { + BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl); bulkDataExportOptions.setPatientIds(Collections.singleton(thePatientId)); return bulkDataExportOptions; } - private BulkDataExportOptions buildBulkDataExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, BulkDataExportOptions.ExportStyle theExportStyle, List> theTypePostFetchFilterUrl) { + private BulkDataExportOptions buildBulkDataExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, BulkDataExportOptions.ExportStyle theExportStyle, List> theTypePostFetchFilterUrl) { String outputFormat = 
theOutputFormat != null ? theOutputFormat.getValueAsString() : Constants.CT_FHIR_NDJSON; Set resourceTypes = null; @@ -466,6 +470,10 @@ public class BulkDataExportProvider { if (theSince != null) { since = theSince.getValue(); } + String exportIdentifier = null; + if (theExportIdentifier != null) { + exportIdentifier = theExportIdentifier.getValueAsString(); + } Set typeFilters = splitTypeFilters(theTypeFilter); Set typePostFetchFilterUrls = splitTypeFilters(theTypePostFetchFilterUrl); @@ -474,6 +482,7 @@ public class BulkDataExportProvider { bulkDataExportOptions.setFilters(typeFilters); bulkDataExportOptions.setPostFetchFilterUrls(typePostFetchFilterUrls); bulkDataExportOptions.setExportStyle(theExportStyle); + bulkDataExportOptions.setExportIdentifier(exportIdentifier); bulkDataExportOptions.setSince(since); bulkDataExportOptions.setResourceTypes(resourceTypes); bulkDataExportOptions.setOutputFormat(outputFormat); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BulkExportUtils.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BulkExportUtils.java index 1ba62eaec60..cb97085d990 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BulkExportUtils.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BulkExportUtils.java @@ -53,6 +53,7 @@ public class BulkExportUtils { } parameters.setExpandMdm(theOptions.isExpandMdm()); parameters.setUseExistingJobsFirst(true); + parameters.setExportIdentifier(theOptions.getExportIdentifier()); return parameters; }
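For reviewers who want to exercise the new parameter end to end, the following is a minimal client-side sketch, not part of the diff, that starts a system-level Bulk Export with `_exportId` and retrieves the polling location, mirroring what `BulkExportUseCaseTest` does over raw HTTP. The base URL and export identifier are assumptions.

```java
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class ExportIdSmokeTest {
	public static void main(String[] args) throws Exception {
		// Hypothetical server base URL; any HAPI FHIR server with bulk export enabled will do
		String base = "http://localhost:8080/fhir";
		HttpGet kickOff = new HttpGet(base + "/$export?_type=Patient&_exportId=nightly-batch-001");
		// Bulk Data kick-off requests must ask for asynchronous processing
		kickOff.addHeader("Prefer", "respond-async");
		try (CloseableHttpClient client = HttpClients.createDefault();
			 CloseableHttpResponse response = client.execute(kickOff)) {
			// A 202 Accepted response carries the status-polling URL in the Content-Location header
			System.out.println(response.getStatusLine().getStatusCode());
			System.out.println(response.getFirstHeader("Content-Location").getValue());
		}
	}
}
```

Once the poll reports completion, each Binary listed in the output manifest should carry the three `Binary.meta` extensions added by `WriteBinaryStep`: the export identifier, the job ID, and the resource type of the contained data.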