diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java index d2c995a435e..016512e537d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java @@ -2387,13 +2387,19 @@ public enum Pointcut implements IPointcut { *
  • * ca.uhn.fhir.mdm.model.mdmevents.MdmMergeEvent - Contains information about the from and to resources. *
  • + *
  • + * ca.uhn.fhir.mdm.model.MdmTransactionContext - Contains information about the Transaction context, e.g. merge or link. + *
  • * *

    * Hooks should return void. *

    */ MDM_POST_MERGE_GOLDEN_RESOURCES( - void.class, "ca.uhn.fhir.rest.api.server.RequestDetails", "ca.uhn.fhir.mdm.model.mdmevents.MdmMergeEvent"), + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.mdm.model.mdmevents.MdmMergeEvent", + "ca.uhn.fhir.mdm.model.MdmTransactionContext"), /** * MDM Link History Hook: diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java index e073c2e2c60..6085508f4c2 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java @@ -19,6 +19,7 @@ */ package ca.uhn.fhir.cli; +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; @@ -31,6 +32,7 @@ import ca.uhn.fhir.system.HapiSystemProperties; import ca.uhn.fhir.util.AttachmentUtil; import ca.uhn.fhir.util.FileUtil; import ca.uhn.fhir.util.ParametersUtil; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Charsets; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; @@ -265,7 +267,7 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { "Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response)); } - private void addFileToRequestBundle(IBaseParameters theInputParameters, String theFileName, byte[] theBytes) { + protected void addFileToRequestBundle(IBaseParameters theInputParameters, String theFileName, byte[] theBytes) { byte[] bytes = theBytes; String fileName = theFileName; @@ -277,7 +279,7 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { FileUtil.formatFileSize(ourTransferSizeLimit)); try { - File 
tempFile = File.createTempFile("hapi-fhir-cli", suffix); + File tempFile = File.createTempFile("hapi-fhir-cli", "." + suffix); tempFile.deleteOnExit(); try (OutputStream fileOutputStream = new FileOutputStream(tempFile, false)) { fileOutputStream.write(bytes); @@ -363,4 +365,9 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { } return retVal; } + + @VisibleForTesting + void setFhirContext(FhirContext theFhirContext) { + myFhirCtx = theFhirContext; + } } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java index f0e08800425..2511ef9db91 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java @@ -22,6 +22,10 @@ import org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyS import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport; import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain; import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; +import org.hl7.fhir.instance.model.api.IBaseParameters; +import org.hl7.fhir.r4.model.Attachment; +import org.hl7.fhir.r4.model.Parameters; +import org.hl7.fhir.r4.model.Type; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -43,6 +47,7 @@ import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.util.List; +import java.util.Optional; import java.util.stream.Stream; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; @@ -54,6 +59,8 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.matchesPattern; import 
static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyList; @@ -479,6 +486,86 @@ public class UploadTerminologyCommandTest { uploadICD10UsingCompressedFile(theFhirVersion, theIncludeTls); } + @ParameterizedTest + @MethodSource("paramsProvider") + @SuppressWarnings("unused") // Both params for @BeforeEach + void testZipFileInParameters(String theFhirVersion, boolean theIncludeTls) { + final IBaseParameters inputParameters = switch (myCtx.getVersion().getVersion()) { + case DSTU2, DSTU2_HL7ORG, DSTU2_1 -> new org.hl7.fhir.dstu2.model.Parameters(); + case DSTU3 -> new org.hl7.fhir.dstu3.model.Parameters(); + case R4 -> new Parameters(); + case R4B -> new org.hl7.fhir.r4b.model.Parameters(); + case R5 -> new org.hl7.fhir.r5.model.Parameters(); + }; + + final UploadTerminologyCommand uploadTerminologyCommand = new UploadTerminologyCommand(); + uploadTerminologyCommand.setFhirContext(myCtx); + uploadTerminologyCommand.setTransferSizeBytes(1); + + uploadTerminologyCommand.addFileToRequestBundle(inputParameters, "something.zip", new byte[] {1,2}); + + final String actualAttachmentUrl = getAttachmentUrl(inputParameters, myCtx); + assertTrue(actualAttachmentUrl.endsWith(".zip")); + } + + private static String getAttachmentUrl(IBaseParameters theInputParameters, FhirContext theCtx) { + switch (theCtx.getVersion().getVersion()) { + case DSTU2: + case DSTU2_HL7ORG: + case DSTU2_1: { + assertInstanceOf(org.hl7.fhir.dstu2.model.Parameters.class, theInputParameters); + final org.hl7.fhir.dstu2.model.Parameters dstu2Parameters = (org.hl7.fhir.dstu2.model.Parameters) theInputParameters; + final List dstu2ParametersList = dstu2Parameters.getParameter(); + final Optional optDstu2FileParam = 
dstu2ParametersList.stream().filter(param -> TerminologyUploaderProvider.PARAM_FILE.equals(param.getName())).findFirst(); + assertTrue(optDstu2FileParam.isPresent()); + final org.hl7.fhir.dstu2.model.Type dstu2Value = optDstu2FileParam.get().getValue(); + assertInstanceOf(org.hl7.fhir.dstu2.model.Attachment.class, dstu2Value); + final org.hl7.fhir.dstu2.model.Attachment dstu2Attachment = (org.hl7.fhir.dstu2.model.Attachment) dstu2Value; + return dstu2Attachment.getUrl(); + } + case DSTU3: { + assertInstanceOf(org.hl7.fhir.dstu3.model.Parameters.class, theInputParameters); + final org.hl7.fhir.dstu3.model.Parameters dstu3Parameters = (org.hl7.fhir.dstu3.model.Parameters) theInputParameters; + final List dstu3ParametersList = dstu3Parameters.getParameter(); + final Optional optDstu3FileParam = dstu3ParametersList.stream().filter(param -> TerminologyUploaderProvider.PARAM_FILE.equals(param.getName())).findFirst(); + assertTrue(optDstu3FileParam.isPresent()); + final org.hl7.fhir.dstu3.model.Type dstu3Value = optDstu3FileParam.get().getValue(); + assertInstanceOf(org.hl7.fhir.dstu3.model.Attachment.class, dstu3Value); + final org.hl7.fhir.dstu3.model.Attachment dstu3Attachment = (org.hl7.fhir.dstu3.model.Attachment) dstu3Value; + return dstu3Attachment.getUrl(); + } + case R4: { + assertInstanceOf(Parameters.class, theInputParameters); + final Parameters r4Parameters = (Parameters) theInputParameters; + final Parameters.ParametersParameterComponent r4Parameter = r4Parameters.getParameter(TerminologyUploaderProvider.PARAM_FILE); + final Type r4Value = r4Parameter.getValue(); + assertInstanceOf(Attachment.class, r4Value); + final Attachment r4Attachment = (Attachment) r4Value; + return r4Attachment.getUrl(); + } + case R4B: { + assertInstanceOf(org.hl7.fhir.r4b.model.Parameters.class, theInputParameters); + final org.hl7.fhir.r4b.model.Parameters r4bParameters = (org.hl7.fhir.r4b.model.Parameters) theInputParameters; + final 
org.hl7.fhir.r4b.model.Parameters.ParametersParameterComponent r4bParameter = r4bParameters.getParameter(TerminologyUploaderProvider.PARAM_FILE); + final org.hl7.fhir.r4b.model.DataType value = r4bParameter.getValue(); + assertInstanceOf(org.hl7.fhir.r4b.model.Attachment.class, value); + final org.hl7.fhir.r4b.model.Attachment r4bAttachment = (org.hl7.fhir.r4b.model.Attachment) value; + return r4bAttachment.getUrl(); + } + case R5: { + assertInstanceOf(org.hl7.fhir.r5.model.Parameters.class, theInputParameters); + final org.hl7.fhir.r5.model.Parameters r4Parameters = (org.hl7.fhir.r5.model.Parameters) theInputParameters; + final org.hl7.fhir.r5.model.Parameters.ParametersParameterComponent parameter = r4Parameters.getParameter(TerminologyUploaderProvider.PARAM_FILE); + final org.hl7.fhir.r5.model.DataType value = parameter.getValue(); + assertInstanceOf(org.hl7.fhir.r5.model.Attachment.class, value); + final org.hl7.fhir.r5.model.Attachment attachment = (org.hl7.fhir.r5.model.Attachment) value; + return attachment.getUrl(); + } + default: + throw new IllegalStateException("Unknown FHIR version: " + theCtx.getVersion().getVersion()); + } + } + private void uploadICD10UsingCompressedFile(String theFhirVersion, boolean theIncludeTls) throws IOException { if (FHIR_VERSION_DSTU3.equals(theFhirVersion)) { when(myTermLoaderSvc.loadIcd10cm(anyList(), any())).thenReturn(new UploadStatistics(100, new org.hl7.fhir.dstu3.model.IdType("CodeSystem/101"))); diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5861-enhance-rule-builder-code-support-multiple-ids.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5861-enhance-rule-builder-code-support-multiple-ids.yaml new file mode 100644 index 00000000000..c44aca386c9 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5861-enhance-rule-builder-code-support-multiple-ids.yaml @@ -0,0 +1,4 @@ +--- +type: add +issue: 5861 +title: "Enhance RuleBuilder 
code to support multiple instance IDs." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5865-updating-name-of-full-text-field-search.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5865-updating-name-of-full-text-field-search.yaml new file mode 100644 index 00000000000..358a0e4276a --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5865-updating-name-of-full-text-field-search.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 5865 +title: "Moving the Hibernate.Search annotation for text indexing from the lob column to the column added as part of the + PostgreSql LOB migration." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5877-exception-when-updating-token-param-with-large-value.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5877-exception-when-updating-token-param-with-large-value.yaml new file mode 100644 index 00000000000..ad6bbd02d1f --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5877-exception-when-updating-token-param-with-large-value.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 5877 +title: "Previously, updating a tokenParam with a value greater than 200 characters would raise a SQLException. +This issue has been fixed." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5886-mdm-apply-survivorship-number-format-exception.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5886-mdm-apply-survivorship-number-format-exception.yaml new file mode 100644 index 00000000000..9dae5a3b34a --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5886-mdm-apply-survivorship-number-format-exception.yaml @@ -0,0 +1,6 @@ +--- +type: fix +issue: 5886 +title: "Previously, either updating links on, or deleting one of two patients with non-numeric IDs linked to a golden + patient would result in a HAPI-0389 if there were survivorship rules. + This issue has been fixed for both the update links and delete cases." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5888-update-binary-security-interceptor-documentation.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5888-update-binary-security-interceptor-documentation.yaml new file mode 100644 index 00000000000..966fbe3d9c8 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5888-update-binary-security-interceptor-documentation.yaml @@ -0,0 +1,7 @@ +--- +type: fix +issue: 5888 +title: "Updated documentation on binary_security_interceptor to specify using + `STORAGE_PRE_INITIATE_BULK_EXPORT` not `STORAGE_INITIATE_BULK_EXPORT` pointcut + to change bulk export parameters. 
+" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5890-add-possibility-to-persist-data-to-lob-columns.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5890-add-possibility-to-persist-data-to-lob-columns.yaml new file mode 100644 index 00000000000..bdca0eeba3c --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5890-add-possibility-to-persist-data-to-lob-columns.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 5890 +title: "As part of the migration from LOB, provided the capability to force persisting data to LOB columns. The default +behavior is to not persist in lob columns." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5893-hapi-fhir-cli-upload-terminology-zip-hapi-0862.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5893-hapi-fhir-cli-upload-terminology-zip-hapi-0862.yaml new file mode 100644 index 00000000000..12d853e5842 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5893-hapi-fhir-cli-upload-terminology-zip-hapi-0862.yaml @@ -0,0 +1,6 @@ +--- +type: fix +issue: 5893 +title: "Previously, hapi-fhir-cli: upload-terminology failed with a HAPI-0862 error when uploading LOINC. + This has been fixed." + diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5898-ld-megascale-meta-operation-fails-hapi-0389.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5898-ld-megascale-meta-operation-fails-hapi-0389.yaml new file mode 100644 index 00000000000..dfe2f459515 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5898-ld-megascale-meta-operation-fails-hapi-0389.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 5898 +title: "Previously, triggering a `$meta` via GET on a new patient with Megascale configured resulted in error HAPI-0389. + This has been fixed." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5899-enhance-mdm-interceptor.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5899-enhance-mdm-interceptor.yaml new file mode 100644 index 00000000000..8746c27e136 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5899-enhance-mdm-interceptor.yaml @@ -0,0 +1,4 @@ +--- +type: add +issue: 5899 +title: "The `MDM_POST_MERGE_GOLDEN_RESOURCES` now supports an additional parameter, of type `ca.uhn.fhir.mdm.model.MdmTransactionContext`. Thanks to Jens Villadsen for the contribution." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5904-query-chained-sort-returns-less-results.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5904-query-chained-sort-returns-less-results.yaml new file mode 100644 index 00000000000..9c8c9873f6d --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5904-query-chained-sort-returns-less-results.yaml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 5904 +title: "Chained sort would exclude results that did not have resources matching the sort chain. These are now included, and sorted at the end." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5915-bulk-export-security-woes.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5915-bulk-export-security-woes.yaml new file mode 100644 index 00000000000..8bd43c7a5c0 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5915-bulk-export-security-woes.yaml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 5915 +title: "Previously, in some edge case scenarios the Bulk Export Rule Applier could accidentally permit a Patient type level bulk export request, even if the calling user only had permissions to a subset of patients. This has been corrected." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5917-chain-sort-mssql.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5917-chain-sort-mssql.yaml new file mode 100644 index 00000000000..1e94d65c58d --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5917-chain-sort-mssql.yaml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 5917 +title: "Fix chained sorts on strings when using MS Sql" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/security/binary_security_interceptor.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/security/binary_security_interceptor.md index 2fea34f786b..412efaf27a8 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/security/binary_security_interceptor.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/security/binary_security_interceptor.md @@ -14,4 +14,9 @@ This interceptor is intended to be subclassed. A simple example is shown below: ## Combining with Bulk Export -The `setBinarySecurityContextIdentifierSystem(..)` and `setBinarySecurityContextIdentifierValue(..)` properties on the `BulkExportJobParameters` object can be used to automatically populate the security context on Binary resources created by Bulk Export jobs with values that can be verified by this interceptor. An interceptor on the `STORAGE_INITIATE_BULK_EXPORT` pointcut is the easiest way to set these properties when a new Bulk Export job is being kicked off. +The `setBinarySecurityContextIdentifierSystem(..)` and `setBinarySecurityContextIdentifierValue(..)` properties on the `BulkExportJobParameters` object can be used to automatically populate the security context on Binary resources created by Bulk Export jobs with values that can be verified by this interceptor. +An interceptor on the `STORAGE_PRE_INITIATE_BULK_EXPORT` pointcut is the recommended way to set these properties when a new Bulk Export job is being kicked off. 
+ +NB: Previous versions recommended using the `STORAGE_INITIATE_BULK_EXPORT` pointcut, but this is no longer the recommended way. +`STORAGE_PRE_INITIATE_BULK_EXPORT` pointcut is called before `STORAGE_INITIATE_BULK_EXPORT` and is thus guaranteed to be called before +any AuthorizationInterceptors. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImpl.java index 8f79e37ecb9..94eddb83955 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImpl.java @@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.dao.data.IBinaryStorageEntityDao; import ca.uhn.fhir.jpa.model.entity.BinaryStorageEntity; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import com.google.common.annotations.VisibleForTesting; import com.google.common.hash.HashingInputStream; import com.google.common.io.ByteStreams; import jakarta.annotation.Nonnull; @@ -59,6 +60,8 @@ public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImp @Autowired private IBinaryStorageEntityDao myBinaryStorageEntityDao; + private boolean mySupportLegacyLobServer = false; + @Nonnull @Override @Transactional(propagation = Propagation.REQUIRED) @@ -96,9 +99,10 @@ public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImp entity.setContentId(id); entity.setStorageContentBin(loadedStream); - // TODO: remove writing Blob in a future release - Blob dataBlob = lobHelper.createBlob(loadedStream); - entity.setBlob(dataBlob); + if (mySupportLegacyLobServer) { + Blob dataBlob = lobHelper.createBlob(loadedStream); + entity.setBlob(dataBlob); + } // Update the entity with the final byte count and hash long bytes 
= countingInputStream.getByteCount(); @@ -169,6 +173,11 @@ public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImp return copyBinaryContentToByteArray(entityOpt); } + public DatabaseBinaryContentStorageSvcImpl setSupportLegacyLobServer(boolean theSupportLegacyLobServer) { + mySupportLegacyLobServer = theSupportLegacyLobServer; + return this; + } + void copyBinaryContentToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity) throws IOException { @@ -212,4 +221,10 @@ public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImp return retVal; } + + @VisibleForTesting + public DatabaseBinaryContentStorageSvcImpl setEntityManagerForTesting(EntityManager theEntityManager) { + myEntityManager = theEntityManager; + return this; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java index e60974ae642..67a7f55d28e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java @@ -73,6 +73,7 @@ import ca.uhn.fhir.jpa.delete.DeleteConflictFinderService; import ca.uhn.fhir.jpa.delete.DeleteConflictService; import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc; import ca.uhn.fhir.jpa.entity.Search; +import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry; import ca.uhn.fhir.jpa.graphql.DaoRegistryGraphQLStorageServices; import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; @@ -154,6 +155,8 @@ import ca.uhn.fhir.jpa.term.TermCodeSystemStorageSvcImpl; import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl; import ca.uhn.fhir.jpa.term.TermReadSvcImpl; import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl; +import ca.uhn.fhir.jpa.term.ValueSetConceptAccumulator; +import ca.uhn.fhir.jpa.term.ValueSetConceptAccumulatorFactory; 
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; @@ -822,6 +825,17 @@ public class JpaConfig { return new TermReadSvcImpl(); } + @Bean + public ValueSetConceptAccumulatorFactory valueSetConceptAccumulatorFactory() { + return new ValueSetConceptAccumulatorFactory(); + } + + @Bean + @Scope("prototype") + public ValueSetConceptAccumulator valueSetConceptAccumulator(TermValueSet theTermValueSet) { + return valueSetConceptAccumulatorFactory().create(theTermValueSet); + } + @Bean public ITermCodeSystemStorageSvc termCodeSystemStorageSvc() { return new TermCodeSystemStorageSvcImpl(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java index 686667b2813..1e786ad5070 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java @@ -1410,19 +1410,20 @@ public abstract class BaseHapiFhirResourceDao extends B } @Override - @Transactional public MT metaGetOperation(Class theType, IIdType theId, RequestDetails theRequest) { - Set tagDefs = new HashSet<>(); - BaseHasResource entity = readEntity(theId, theRequest); - for (BaseTag next : entity.getTags()) { - tagDefs.add(next.getTag()); - } - MT retVal = toMetaDt(theType, tagDefs); + return myTransactionService.withRequest(theRequest).execute(() -> { + Set tagDefs = new HashSet<>(); + BaseHasResource entity = readEntity(theId, theRequest); + for (BaseTag next : entity.getTags()) { + tagDefs.add(next.getTag()); + } + MT retVal = toMetaDt(theType, tagDefs); - retVal.setLastUpdated(entity.getUpdatedDate()); - retVal.setVersionId(Long.toString(entity.getVersion())); + retVal.setLastUpdated(entity.getUpdatedDate()); + 
retVal.setVersionId(Long.toString(entity.getVersion())); - return retVal; + return retVal; + }); } @Override diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java index 57674d4cfc7..d238278bcfe 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java @@ -24,6 +24,7 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum; import ca.uhn.fhir.jpa.search.DeferConceptIndexingRoutingBinder; import ca.uhn.fhir.util.ValidateUtil; +import com.google.common.annotations.VisibleForTesting; import jakarta.annotation.Nonnull; import jakarta.persistence.Column; import jakarta.persistence.Entity; @@ -58,10 +59,7 @@ import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.RoutingBinderR import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; -import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexingDependency; -import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ObjectPath; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBinding; -import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue; import org.hl7.fhir.r4.model.Coding; import java.io.Serializable; @@ -177,6 +175,11 @@ public class TermConcept implements Serializable { @Column(name = "PARENT_PIDS", nullable = true) private String myParentPids; + @FullTextField( + name = "myParentPids", + searchable = Searchable.YES, + projectable = Projectable.YES, + analyzer = "conceptParentPidsAnalyzer") @Column(name = "PARENT_PIDS_VC", nullable = true, length = 
Length.LONG32) private String myParentPidsVc; @@ -189,6 +192,9 @@ public class TermConcept implements Serializable { @Column(name = "CODE_SEQUENCE", nullable = true) private Integer mySequence; + @Transient + private boolean mySupportLegacyLob = false; + public TermConcept() { super(); } @@ -362,13 +368,6 @@ public class TermConcept implements Serializable { return this; } - @Transient - @FullTextField( - name = "myParentPids", - searchable = Searchable.YES, - projectable = Projectable.YES, - analyzer = "conceptParentPidsAnalyzer") - @IndexingDependency(derivedFrom = @ObjectPath({@PropertyValue(propertyName = "myParentPidsVc")})) public String getParentPidsAsString() { return nonNull(myParentPidsVc) ? myParentPidsVc : myParentPids; } @@ -458,6 +457,10 @@ public class TermConcept implements Serializable { ourLog.trace("Code {}/{} has parents {}", entity.getId(), entity.getCode(), entity.getParentPidsAsString()); } + + if (!mySupportLegacyLob) { + clearParentPidsLob(); + } } private void setParentPids(Set theParentPids) { @@ -519,4 +522,17 @@ public class TermConcept implements Serializable { public List getChildCodes() { return getChildren().stream().map(TermConceptParentChildLink::getChild).collect(Collectors.toList()); } + + public void flagForLegacyLobSupport(boolean theSupportLegacyLob) { + mySupportLegacyLob = theSupportLegacyLob; + } + + private void clearParentPidsLob() { + myParentPids = null; + } + + @VisibleForTesting + public boolean hasParentPidsLobForTesting() { + return nonNull(myParentPids); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java index 6bbc3b178a4..790dc9048c7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java @@ -54,6 +54,7 @@ import 
org.hibernate.validator.constraints.NotBlank; import java.io.Serializable; import java.nio.charset.StandardCharsets; +import static java.util.Objects.nonNull; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; @@ -307,9 +308,15 @@ public class TermConceptProperty implements Serializable { return myId; } + public void performLegacyLobSupport(boolean theSupportLegacyLob) { + if (!theSupportLegacyLob) { + myValueLob = null; + } + } + @VisibleForTesting - public byte[] getValueBlobForTesting() { - return myValueLob; + public boolean hasValueBlobForTesting() { + return nonNull(myValueLob); } @VisibleForTesting @@ -318,8 +325,8 @@ public class TermConceptProperty implements Serializable { } @VisibleForTesting - public byte[] getValueBinForTesting() { - return myValueBin; + public boolean hasValueBinForTesting() { + return nonNull(myValueBin); } @VisibleForTesting diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java index 2e17bc6ae9c..c822c56046c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.entity; import ca.uhn.fhir.util.ValidateUtil; +import com.google.common.annotations.VisibleForTesting; import jakarta.annotation.Nonnull; import jakarta.persistence.Column; import jakarta.persistence.Entity; @@ -46,6 +47,7 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.List; +import static java.util.Objects.nonNull; import static org.apache.commons.lang3.StringUtils.isNotEmpty; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; @@ -296,4 +298,13 @@ public class TermValueSetConcept implements Serializable { ? 
mySourceConceptDirectParentPidsVc : mySourceConceptDirectParentPids; } + + public void clearSourceConceptDirectParentPidsLob() { + mySourceConceptDirectParentPids = null; + } + + @VisibleForTesting + public boolean hasSourceConceptDirectParentPidsLob() { + return nonNull(mySourceConceptDirectParentPids); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java index c66c41a9b7e..28f80e15c62 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java @@ -43,20 +43,21 @@ import java.sql.SQLException; * because hibernate won't allow the view the function without it, but */ "SELECT CONCAT_WS(' ', vsc.PID, vscd.PID) AS PID, " + " vsc.PID AS CONCEPT_PID, " - + " vsc.VALUESET_PID AS CONCEPT_VALUESET_PID, " - + " vsc.VALUESET_ORDER AS CONCEPT_VALUESET_ORDER, " - + " vsc.SYSTEM_URL AS CONCEPT_SYSTEM_URL, " - + " vsc.CODEVAL AS CONCEPT_CODEVAL, " - + " vsc.DISPLAY AS CONCEPT_DISPLAY, " - + " vsc.SYSTEM_VER AS SYSTEM_VER, " - + " vsc.SOURCE_PID AS SOURCE_PID, " - + " vsc.SOURCE_DIRECT_PARENT_PIDS AS SOURCE_DIRECT_PARENT_PIDS, " - + " vscd.PID AS DESIGNATION_PID, " - + " vscd.LANG AS DESIGNATION_LANG, " - + " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, " - + " vscd.USE_CODE AS DESIGNATION_USE_CODE, " - + " vscd.USE_DISPLAY AS DESIGNATION_USE_DISPLAY, " - + " vscd.VAL AS DESIGNATION_VAL " + + " vsc.VALUESET_PID AS CONCEPT_VALUESET_PID, " + + " vsc.VALUESET_ORDER AS CONCEPT_VALUESET_ORDER, " + + " vsc.SYSTEM_URL AS CONCEPT_SYSTEM_URL, " + + " vsc.CODEVAL AS CONCEPT_CODEVAL, " + + " vsc.DISPLAY AS CONCEPT_DISPLAY, " + + " vsc.SYSTEM_VER AS SYSTEM_VER, " + + " vsc.SOURCE_PID AS SOURCE_PID, " + + " vsc.SOURCE_DIRECT_PARENT_PIDS AS SOURCE_DIRECT_PARENT_PIDS, " + + " vsc.SOURCE_DIRECT_PARENT_PIDS_VC AS 
SOURCE_DIRECT_PARENT_PIDS_VC, " + + " vscd.PID AS DESIGNATION_PID, " + + " vscd.LANG AS DESIGNATION_LANG, " + + " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, " + + " vscd.USE_CODE AS DESIGNATION_USE_CODE, " + + " vscd.USE_DISPLAY AS DESIGNATION_USE_DISPLAY, " + + " vscd.VAL AS DESIGNATION_VAL " + "FROM TRM_VALUESET_CONCEPT vsc " + "LEFT OUTER JOIN TRM_VALUESET_C_DESIGNATION vscd ON vsc.PID = vscd.VALUESET_CONCEPT_PID") public class TermValueSetConceptView implements Serializable, ITermValueSetConceptView { @@ -112,6 +113,9 @@ public class TermValueSetConceptView implements Serializable, ITermValueSetConce @Column(name = "SOURCE_DIRECT_PARENT_PIDS", nullable = true) private Clob mySourceConceptDirectParentPids; + @Column(name = "SOURCE_DIRECT_PARENT_PIDS_VC", nullable = true) + private String mySourceConceptDirectParentPidsVc; + @Override public Long getSourceConceptPid() { return mySourceConceptPid; @@ -119,14 +123,19 @@ public class TermValueSetConceptView implements Serializable, ITermValueSetConce @Override public String getSourceConceptDirectParentPids() { + String retVal = null; + if (mySourceConceptDirectParentPids != null) { try (Reader characterStream = mySourceConceptDirectParentPids.getCharacterStream()) { - return IOUtils.toString(characterStream); + retVal = IOUtils.toString(characterStream); } catch (IOException | SQLException e) { throw new InternalErrorException(Msg.code(828) + e); } + } else if (mySourceConceptDirectParentPidsVc != null) { + retVal = mySourceConceptDirectParentPidsVc; } - return null; + + return retVal; } @Override diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index 3c421278e10..03a34c96468 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask; +import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask; import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask; import ca.uhn.fhir.jpa.migrate.taskdef.CalculateOrdinalDatesTask; import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum; @@ -146,8 +147,16 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { binaryStorageBlobTable .renameColumn("20240404.1", "BLOB_ID", "CONTENT_ID") + .getLastAddedTask() + .ifPresent(BaseTask::doNothing); + binaryStorageBlobTable .renameColumn("20240404.2", "BLOB_SIZE", "CONTENT_SIZE") - .renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH"); + .getLastAddedTask() + .ifPresent(BaseTask::doNothing); + binaryStorageBlobTable + .renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH") + .getLastAddedTask() + .ifPresent(BaseTask::doNothing); binaryStorageBlobTable .modifyColumn("20240404.4", "BLOB_DATA") @@ -159,9 +168,23 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { .nullable() .type(ColumnTypeEnum.BINARY); - binaryStorageBlobTable.migrateBlobToBinary("20240404.6", "BLOB_DATA", "STORAGE_CONTENT_BIN"); + binaryStorageBlobTable + .migrateBlobToBinary("20240404.6", "BLOB_DATA", "STORAGE_CONTENT_BIN") + .doNothing(); - binaryStorageBlobTable.renameTable("20240404.7", "HFJ_BINARY_STORAGE"); + binaryStorageBlobTable + .renameTable("20240404.7", "HFJ_BINARY_STORAGE") + .doNothing(); + + Builder.BuilderWithTableName binaryStorageTableFix = version.onTable("HFJ_BINARY_STORAGE"); + + binaryStorageTableFix.renameColumn("20240404.10", "CONTENT_ID", "BLOB_ID", true, true); + binaryStorageTableFix.renameColumn("20240404.20", "CONTENT_SIZE", "BLOB_SIZE", true, true); + 
binaryStorageTableFix.renameColumn("20240404.30", "CONTENT_HASH", "BLOB_HASH", true, true); + + binaryStorageTableFix + .renameTable("20240404.40", "HFJ_BINARY_STORAGE_BLOB") + .failureAllowed(); } { @@ -172,7 +195,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { .nullable() .type(ColumnTypeEnum.BINARY); - termConceptPropertyTable.migrateBlobToBinary("20240409.2", "PROP_VAL_LOB", "PROP_VAL_BIN"); + termConceptPropertyTable + .migrateBlobToBinary("20240409.2", "PROP_VAL_LOB", "PROP_VAL_BIN") + .doNothing(); } { @@ -182,8 +207,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { .nullable() .type(ColumnTypeEnum.TEXT); - termValueSetConceptTable.migrateClobToText( - "20240409.4", "SOURCE_DIRECT_PARENT_PIDS", "SOURCE_DIRECT_PARENT_PIDS_VC"); + termValueSetConceptTable + .migrateClobToText("20240409.4", "SOURCE_DIRECT_PARENT_PIDS", "SOURCE_DIRECT_PARENT_PIDS_VC") + .doNothing(); } { @@ -193,7 +219,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { .nullable() .type(ColumnTypeEnum.TEXT); - termConceptTable.migrateClobToText("20240410.2", "PARENT_PIDS", "PARENT_PIDS_VC"); + termConceptTable + .migrateClobToText("20240410.2", "PARENT_PIDS", "PARENT_PIDS_VC") + .doNothing(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index bbc74d27dd5..6202049515d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -37,7 +37,7 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc { IPartitionLookupSvc myPartitionConfigSvc; @Override - protected RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId) { + public RequestPartitionId 
validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId) { List names = null; for (int i = 0; i < theRequestPartitionId.getPartitionIds().size(); i++) { @@ -59,7 +59,7 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc { } } - if (theRequestPartitionId.getPartitionNames() != null) { + if (theRequestPartitionId.hasPartitionNames()) { if (partition == null) { Validate.isTrue( theRequestPartitionId.getPartitionIds().get(i) == null, @@ -68,8 +68,8 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc { } else { Validate.isTrue( Objects.equals( - theRequestPartitionId.getPartitionIds().get(i), partition.getId()), - "Partition name %s does not match ID %n", + theRequestPartitionId.getPartitionNames().get(i), partition.getName()), + "Partition name %s does not match ID %s", theRequestPartitionId.getPartitionNames().get(i), theRequestPartitionId.getPartitionIds().get(i)); } @@ -94,7 +94,7 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc { } @Override - protected RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId) { + public RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId) { List ids = null; for (int i = 0; i < theRequestPartitionId.getPartitionNames().size(); i++) { @@ -122,9 +122,9 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc { Validate.isTrue( Objects.equals( theRequestPartitionId.getPartitionIds().get(i), partition.getId()), - "Partition name %s does not match ID %n", - theRequestPartitionId.getPartitionNames().get(i), - theRequestPartitionId.getPartitionIds().get(i)); + "Partition ID %s does not match name %s", + theRequestPartitionId.getPartitionIds().get(i), + theRequestPartitionId.getPartitionNames().get(i)); } } else { if (ids == null) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java index 4f0b3e60778..685f96c16f8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java @@ -353,26 +353,39 @@ public class QueryStack { throw new InvalidRequestException(Msg.code(2289) + msg); } - BaseSearchParamPredicateBuilder chainedPredicateBuilder; - DbColumn[] sortColumn; + // add a left-outer join to a predicate for the target type, then sort on value columns(s). switch (targetSearchParameter.getParamType()) { case STRING: StringPredicateBuilder stringPredicateBuilder = mySqlBuilder.createStringPredicateBuilder(); - sortColumn = new DbColumn[] {stringPredicateBuilder.getColumnValueNormalized()}; - chainedPredicateBuilder = stringPredicateBuilder; - break; + addSortCustomJoin( + resourceLinkPredicateBuilder.getColumnTargetResourceId(), + stringPredicateBuilder, + stringPredicateBuilder.createHashIdentityPredicate(targetType, theChain)); + + mySqlBuilder.addSortString( + stringPredicateBuilder.getColumnValueNormalized(), theAscending, myUseAggregate); + return; + case TOKEN: TokenPredicateBuilder tokenPredicateBuilder = mySqlBuilder.createTokenPredicateBuilder(); - sortColumn = - new DbColumn[] {tokenPredicateBuilder.getColumnSystem(), tokenPredicateBuilder.getColumnValue() - }; - chainedPredicateBuilder = tokenPredicateBuilder; - break; + addSortCustomJoin( + resourceLinkPredicateBuilder.getColumnTargetResourceId(), + tokenPredicateBuilder, + tokenPredicateBuilder.createHashIdentityPredicate(targetType, theChain)); + + mySqlBuilder.addSortString(tokenPredicateBuilder.getColumnSystem(), theAscending, myUseAggregate); + mySqlBuilder.addSortString(tokenPredicateBuilder.getColumnValue(), theAscending, myUseAggregate); + return; + case DATE: DatePredicateBuilder datePredicateBuilder = mySqlBuilder.createDatePredicateBuilder(); - sortColumn = new 
DbColumn[] {datePredicateBuilder.getColumnValueLow()}; - chainedPredicateBuilder = datePredicateBuilder; - break; + addSortCustomJoin( + resourceLinkPredicateBuilder.getColumnTargetResourceId(), + datePredicateBuilder, + datePredicateBuilder.createHashIdentityPredicate(targetType, theChain)); + + mySqlBuilder.addSortDate(datePredicateBuilder.getColumnValueLow(), theAscending, myUseAggregate); + return; /* * Note that many of the options below aren't implemented because they @@ -417,14 +430,6 @@ public class QueryStack { + theParamName + "." + theChain + " as this parameter. Can not sort on chains of target type: " + targetSearchParameter.getParamType().name()); } - - addSortCustomJoin(resourceLinkPredicateBuilder.getColumnTargetResourceId(), chainedPredicateBuilder, null); - Condition predicate = chainedPredicateBuilder.createHashIdentityPredicate(targetType, theChain); - mySqlBuilder.addPredicate(predicate); - - for (DbColumn next : sortColumn) { - mySqlBuilder.addSortNumeric(next, theAscending, myUseAggregate); - } } public void addSortOnString(String theResourceName, String theParamName, boolean theAscending) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java index 61fbea5830e..15998b0e8e6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java @@ -73,7 +73,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; @@ -685,26 +684,6 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { } } - private int ensureParentsSaved(Collection theParents) { - 
ourLog.trace("Checking {} parents", theParents.size()); - int retVal = 0; - - for (TermConceptParentChildLink nextLink : theParents) { - if (nextLink.getRelationshipType() == TermConceptParentChildLink.RelationshipTypeEnum.ISA) { - TermConcept nextParent = nextLink.getParent(); - retVal += ensureParentsSaved(nextParent.getParents()); - if (nextParent.getId() == null) { - nextParent.setUpdated(new Date()); - myConceptDao.saveAndFlush(nextParent); - retVal++; - ourLog.debug("Saved parent code {} and got id {}", nextParent.getCode(), nextParent.getId()); - } - } - } - - return retVal; - } - @Nonnull private TermCodeSystem getOrCreateDistinctTermCodeSystem( IResourcePersistentId theCodeSystemResourcePid, diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvc.java index 9bcf47aebb0..1927de43500 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvc.java @@ -46,6 +46,8 @@ public class TermConceptDaoSvc { @Autowired protected ITermConceptDesignationDao myConceptDesignationDao; + private boolean mySupportLegacyLob = false; + public int saveConcept(TermConcept theConcept) { int retVal = 0; @@ -70,9 +72,11 @@ public class TermConceptDaoSvc { retVal++; theConcept.setIndexStatus(BaseHapiFhirDao.INDEX_STATUS_INDEXED); theConcept.setUpdated(new Date()); + theConcept.flagForLegacyLobSupport(mySupportLegacyLob); myConceptDao.save(theConcept); for (TermConceptProperty next : theConcept.getProperties()) { + next.performLegacyLobSupport(mySupportLegacyLob); myConceptPropertyDao.save(next); } @@ -85,6 +89,11 @@ public class TermConceptDaoSvc { return retVal; } + public TermConceptDaoSvc setSupportLegacyLob(boolean theSupportLegacyLob) { + mySupportLegacyLob = theSupportLegacyLob; + return this; + } + private int ensureParentsSaved(Collection 
theParents) { ourLog.trace("Checking {} parents", theParents.size()); int retVal = 0; @@ -95,6 +104,7 @@ public class TermConceptDaoSvc { retVal += ensureParentsSaved(nextParent.getParents()); if (nextParent.getId() == null) { nextParent.setUpdated(new Date()); + nextParent.flagForLegacyLobSupport(mySupportLegacyLob); myConceptDao.saveAndFlush(nextParent); retVal++; ourLog.debug("Saved parent code {} and got id {}", nextParent.getCode(), nextParent.getId()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java index 86235e0403b..2705ed40fe5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java @@ -293,6 +293,9 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @Autowired private InMemoryTerminologyServerValidationSupport myInMemoryTerminologyServerValidationSupport; + @Autowired + private ValueSetConceptAccumulatorFactory myValueSetConceptAccumulatorFactory; + @Override public boolean isCodeSystemSupported(ValidationSupportContext theValidationSupportContext, String theSystem) { TermCodeSystemVersionDetails cs = getCurrentCodeSystemVersion(theSystem); @@ -2393,11 +2396,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { }); assert valueSet != null; - ValueSetConceptAccumulator accumulator = new ValueSetConceptAccumulator( - valueSetToExpand, myTermValueSetDao, myValueSetConceptDao, myValueSetConceptDesignationDao); + ValueSetConceptAccumulator valueSetConceptAccumulator = + myValueSetConceptAccumulatorFactory.create(valueSetToExpand); ValueSetExpansionOptions options = new ValueSetExpansionOptions(); options.setIncludeHierarchy(true); - expandValueSet(options, valueSet, accumulator); + expandValueSet(options, valueSet, valueSetConceptAccumulator); // We are done 
with this ValueSet. txTemplate.executeWithoutResult(t -> { @@ -2412,7 +2415,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { "Pre-expanded ValueSet[{}] with URL[{}] - Saved {} concepts in {}", valueSet.getId(), valueSet.getUrl(), - accumulator.getConceptsSaved(), + valueSetConceptAccumulator.getConceptsSaved(), sw); } catch (Exception e) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java index 6561495ad45..9aa38413bb7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java @@ -48,6 +48,8 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { private int myDesignationsSaved; private int myConceptsExcluded; + private boolean mySupportLegacyLob = false; + public ValueSetConceptAccumulator( @Nonnull TermValueSet theTermValueSet, @Nonnull ITermValueSetDao theValueSetDao, @@ -184,6 +186,10 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { concept.setSourceConceptPid(theSourceConceptPid); concept.setSourceConceptDirectParentPids(theSourceConceptDirectParentPids); + if (!mySupportLegacyLob) { + concept.clearSourceConceptDirectParentPidsLob(); + } + myValueSetConceptDao.save(concept); myValueSetDao.save(myTermValueSet.incrementTotalConcepts()); @@ -253,4 +259,9 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { // TODO: DM 2019-07-16 - If so, we should also populate TermValueSetConceptProperty entities here. // TODO: DM 2019-07-30 - Expansions don't include the properties themselves; they may be needed to facilitate // filters and parameterized expansions. 
+ + public ValueSetConceptAccumulator setSupportLegacyLob(boolean theSupportLegacyLob) { + mySupportLegacyLob = theSupportLegacyLob; + return this; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorFactory.java new file mode 100644 index 00000000000..e61da193b8a --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorFactory.java @@ -0,0 +1,51 @@ +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.term; + +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao; +import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao; +import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; +import ca.uhn.fhir.jpa.entity.TermValueSet; +import org.springframework.beans.factory.annotation.Autowired; + +public class ValueSetConceptAccumulatorFactory { + + @Autowired + private ITermValueSetDao myValueSetDao; + + @Autowired + private ITermValueSetConceptDao myValueSetConceptDao; + + @Autowired + private ITermValueSetConceptDesignationDao myValueSetConceptDesignationDao; + + @Autowired + private JpaStorageSettings myStorageSettings; + + public ValueSetConceptAccumulator create(TermValueSet theTermValueSet) { + ValueSetConceptAccumulator valueSetConceptAccumulator = new ValueSetConceptAccumulator( + theTermValueSet, myValueSetDao, myValueSetConceptDao, myValueSetConceptDesignationDao); + + valueSetConceptAccumulator.setSupportLegacyLob(myStorageSettings.isWriteToLegacyLobColumns()); + + return valueSetConceptAccumulator; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/config/TermCodeSystemConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/config/TermCodeSystemConfig.java index 297d0726ce3..44132dbdbd6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/config/TermCodeSystemConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/config/TermCodeSystemConfig.java @@ -19,6 +19,7 @@ */ package ca.uhn.fhir.jpa.term.config; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.term.TermConceptDaoSvc; import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc; @@ -41,7 +42,7 @@ public class TermCodeSystemConfig { } @Bean - public TermConceptDaoSvc termConceptDaoSvc() { - return new TermConceptDaoSvc(); + public 
TermConceptDaoSvc termConceptDaoSvc(JpaStorageSettings theJpaStorageSettings) { + return new TermConceptDaoSvc().setSupportLegacyLob(theJpaStorageSettings.isWriteToLegacyLobColumns()); } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyTest.java index 44a01649cfd..cda9c4554a9 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyTest.java @@ -2,8 +2,11 @@ package ca.uhn.fhir.jpa.entity; import com.google.common.base.Strings; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; @@ -21,8 +24,8 @@ public class TermConceptPropertyTest { termConceptProperty.setValue(ourVeryLongString); // then - assertThat(termConceptProperty.getValueBlobForTesting(), notNullValue()); - assertThat(termConceptProperty.getValueBinForTesting(), notNullValue()); + assertThat(termConceptProperty.hasValueBlobForTesting(), equalTo(true)); + assertThat(termConceptProperty.hasValueBinForTesting(), equalTo(true)); } @Test @@ -78,4 +81,19 @@ public class TermConceptPropertyTest { assertThat(value, startsWith("a")); } + @ParameterizedTest + @ValueSource(booleans = {false, true}) + public void testSetValue_withSupportLegacyLob(boolean theSupportLegacyLob){ + // given + TermConceptProperty termConceptProperty = new TermConceptProperty(); + + // when + termConceptProperty.setValue(ourVeryLongString); + termConceptProperty.performLegacyLobSupport(theSupportLegacyLob); + + // then + 
assertThat(termConceptProperty.hasValueBinForTesting(), equalTo(true)); + assertThat(termConceptProperty.hasValueBlobForTesting(), equalTo(theSupportLegacyLob)); + } + } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java index 5165581e3ef..690d2cc061d 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java @@ -166,6 +166,7 @@ public class GoldenResourceMergerSvcImpl implements IGoldenResourceMergerSvc { HookParams params = new HookParams(); params.add(MdmMergeEvent.class, event); params.add(RequestDetails.class, theParams.getRequestDetails()); + params.add(MdmTransactionContext.class, theParams.getMdmTransactionContext()); myInterceptorBroadcaster.callHooks(Pointcut.MDM_POST_MERGE_GOLDEN_RESOURCES, params); } } diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplIT.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplIT.java index febcaf4c6e9..5e3aaad9f42 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplIT.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplIT.java @@ -2,11 +2,16 @@ package ca.uhn.fhir.jpa.mdm.svc; import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test; import ca.uhn.fhir.mdm.api.IMdmSurvivorshipService; +import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; +import ca.uhn.fhir.mdm.api.MdmMatchOutcome; import ca.uhn.fhir.mdm.model.MdmTransactionContext; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; +import java.util.List; + import static 
org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; @@ -55,4 +60,21 @@ class MdmSurvivorshipSvcImplIT extends BaseMdmR4Test { assertEquals(p1.getTelecom().size(), p1.getTelecom().size()); assertTrue(p2.getTelecomFirstRep().equalsDeep(p1.getTelecomFirstRep())); } + + @Test + public void matchingPatientsWith_NON_Numeric_Ids_matches_doesNotThrow_NumberFormatException() { + final Patient frankPatient1 = buildFrankPatient(); + frankPatient1.setId("patA"); + myPatientDao.update(frankPatient1, new SystemRequestDetails()); + final Patient frankPatient2 = buildFrankPatient(); + frankPatient2.setId("patB"); + myPatientDao.update(frankPatient2, new SystemRequestDetails()); + final Patient goldenPatient = buildFrankPatient(); + myPatientDao.create(goldenPatient, new SystemRequestDetails()); + + myMdmLinkDaoSvc.createOrUpdateLinkEntity(goldenPatient, frankPatient1, MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, MdmLinkSourceEnum.MANUAL, createContextForCreate("Patient")); + myMdmLinkDaoSvc.createOrUpdateLinkEntity(goldenPatient, frankPatient2, MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, MdmLinkSourceEnum.MANUAL, createContextForCreate("Patient")); + + myMdmSurvivorshipService.rebuildGoldenResourceWithSurvivorshipRules(goldenPatient, new MdmTransactionContext(MdmTransactionContext.OperationType.UPDATE_LINK)); + } } diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java index a6502687415..65cc7bde0c4 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java @@ -25,8 +25,9 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import org.hl7.fhir.instance.model.api.IBaseResource; 
import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Mock; import org.mockito.Spy; import org.mockito.junit.jupiter.MockitoExtension; @@ -96,8 +97,9 @@ public class MdmSurvivorshipSvcImplTest { } @SuppressWarnings({"rawtypes", "unchecked"}) - @Test - public void rebuildGoldenResourceCurrentLinksUsingSurvivorshipRules_withManyLinks_rebuildsInUpdateOrder() { + @ParameterizedTest + @ValueSource(booleans = {true,false}) + public void rebuildGoldenResourceCurrentLinksUsingSurvivorshipRules_withManyLinks_rebuildsInUpdateOrder(boolean theIsUseNonNumericId) { // setup // create resources Patient goldenPatient = new Patient(); @@ -126,7 +128,7 @@ public class MdmSurvivorshipSvcImplTest { patient.addIdentifier() .setSystem("http://example.com") .setValue("Value" + i); - patient.setId("Patient/" + i); + patient.setId("Patient/" + (theIsUseNonNumericId ? 
"pat"+i : Integer.toString(i))); resources.add(patient); MdmLinkJson link = createLinkJson( @@ -149,8 +151,13 @@ public class MdmSurvivorshipSvcImplTest { when(myDaoRegistry.getResourceDao(eq("Patient"))) .thenReturn(resourceDao); AtomicInteger counter = new AtomicInteger(); - when(resourceDao.readByPid(any())) - .thenAnswer(params -> resources.get(counter.getAndIncrement())); + if (theIsUseNonNumericId) { + when(resourceDao.read(any(), any())) + .thenAnswer(params -> resources.get(counter.getAndIncrement())); + } else { + when(resourceDao.readByPid(any())) + .thenAnswer(params -> resources.get(counter.getAndIncrement())); + } Page linkPage = mock(Page.class); when(myMdmLinkQuerySvc.queryLinks(any(), any())) .thenReturn(linkPage); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java index c71b8e29fda..c6b046e0bff 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java @@ -34,23 +34,26 @@ import java.util.Date; import static java.util.Objects.nonNull; @Entity -@Table(name = "HFJ_BINARY_STORAGE") +@Table(name = "HFJ_BINARY_STORAGE_BLOB") public class BinaryStorageEntity { @Id - @Column(name = "CONTENT_ID", length = 200, nullable = false) + @Column(name = "BLOB_ID", length = 200, nullable = false) // N.B GGG: Note that the `content id` is the same as the `externalized binary id`. 
private String myContentId; @Column(name = "RESOURCE_ID", length = 100, nullable = false) private String myResourceId; - @Column(name = "CONTENT_SIZE", nullable = true) + @Column(name = "BLOB_SIZE", nullable = true) private long mySize; @Column(name = "CONTENT_TYPE", nullable = false, length = 100) private String myContentType; + /** + * @deprecated + */ @Deprecated(since = "7.2.0") @Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later @Column(name = "BLOB_DATA", nullable = true, insertable = true, updatable = false) @@ -63,7 +66,7 @@ public class BinaryStorageEntity { @Column(name = "PUBLISHED_DATE", nullable = false) private Date myPublished; - @Column(name = "CONTENT_HASH", length = 128, nullable = true) + @Column(name = "BLOB_HASH", length = 128, nullable = true) private String myHash; public Date getPublished() { diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java index cf17cf5af41..ede4b39e7ca 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java @@ -156,4 +156,30 @@ public interface IRequestPartitionHelperSvc { Set toReadPartitions(@Nonnull RequestPartitionId theRequestPartitionId); boolean isResourcePartitionable(String theResourceType); + + /** + * No interceptors should be invoked by this method. It should ONLY be used when partition ids are + * known, but partition names are not. + *

    + * Ensures the list of partition ids inside the given {@link RequestPartitionId} correctly map to the + * list of partition names. If the list of partition names is empty, this method will map the correct + * partition names and return a normalized {@link RequestPartitionId}. + *

    + * @param theRequestPartitionId - An unvalidated and unnormalized {@link RequestPartitionId}. + * @return - A {@link RequestPartitionId} with a normalized list of partition ids and partition names. + */ + RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId); + + /** + * No interceptors should be invoked by this method. It should ONLY be used when partition names are + * known, but partition ids are not. + *

    + * Ensures the list of partition names inside the given {@link RequestPartitionId} correctly maps to the + list of partition ids. If the list of partition ids is empty, this method will map the correct + partition ids and return a normalized {@link RequestPartitionId}. + *

    + * @param theRequestPartitionId - An unvalidated and unnormalized {@link RequestPartitionId}. + * @return - A {@link RequestPartitionId} with a normalized list of partition ids and partition names. + */ + RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId); } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/config/SubscriptionConfig.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/config/SubscriptionConfig.java new file mode 100644 index 00000000000..4d95a057c54 --- /dev/null +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/config/SubscriptionConfig.java @@ -0,0 +1,35 @@ +/*- + * #%L + * HAPI FHIR Subscription Server + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.subscription.config; + +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionStrategyEvaluator; +import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionQueryValidator; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class SubscriptionConfig { + @Bean + public SubscriptionQueryValidator subscriptionQueryValidator( + DaoRegistry theDaoRegistry, SubscriptionStrategyEvaluator theSubscriptionStrategyEvaluator) { + return new SubscriptionQueryValidator(theDaoRegistry, theSubscriptionStrategyEvaluator); + } +} diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/config/SubscriptionSubmitterConfig.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/config/SubscriptionSubmitterConfig.java index 65e0d2010a7..ff056ad9207 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/config/SubscriptionSubmitterConfig.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/config/SubscriptionSubmitterConfig.java @@ -19,15 +19,13 @@ */ package ca.uhn.fhir.jpa.subscription.submit.config; -import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.subscription.async.AsyncResourceModifiedProcessingSchedulerSvc; import ca.uhn.fhir.jpa.subscription.async.AsyncResourceModifiedSubmitterSvc; import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory; -import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionStrategyEvaluator; +import ca.uhn.fhir.jpa.subscription.config.SubscriptionConfig; import ca.uhn.fhir.jpa.subscription.model.config.SubscriptionModelConfig; -import 
ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionQueryValidator; import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionSubmitInterceptorLoader; import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionValidatingInterceptor; import ca.uhn.fhir.jpa.subscription.submit.svc.ResourceModifiedSubmitterSvc; @@ -45,7 +43,7 @@ import org.springframework.context.annotation.Lazy; * matching queue for processing */ @Configuration -@Import({SubscriptionModelConfig.class, SubscriptionMatcherInterceptorConfig.class}) +@Import({SubscriptionModelConfig.class, SubscriptionMatcherInterceptorConfig.class, SubscriptionConfig.class}) public class SubscriptionSubmitterConfig { @Bean @@ -53,12 +51,6 @@ public class SubscriptionSubmitterConfig { return new SubscriptionValidatingInterceptor(); } - @Bean - public SubscriptionQueryValidator subscriptionQueryValidator( - DaoRegistry theDaoRegistry, SubscriptionStrategyEvaluator theSubscriptionStrategyEvaluator) { - return new SubscriptionQueryValidator(theDaoRegistry, theSubscriptionStrategyEvaluator); - } - @Bean public SubscriptionSubmitInterceptorLoader subscriptionMatcherInterceptorLoader() { return new SubscriptionSubmitInterceptorLoader(); diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java index 4b571c7bd33..802f43a47e5 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java @@ -22,13 +22,15 @@ package ca.uhn.fhir.jpa.topic; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher; -import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionStrategyEvaluator; +import 
ca.uhn.fhir.jpa.subscription.config.SubscriptionConfig; import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionQueryValidator; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Lazy; @Configuration +@Import(SubscriptionConfig.class) public class SubscriptionTopicConfig { @Bean SubscriptionTopicMatchingSubscriber subscriptionTopicMatchingSubscriber(FhirContext theFhirContext) { @@ -76,13 +78,6 @@ public class SubscriptionTopicConfig { } } - @Bean - @Lazy - public SubscriptionQueryValidator subscriptionQueryValidator( - DaoRegistry theDaoRegistry, SubscriptionStrategyEvaluator theSubscriptionStrategyEvaluator) { - return new SubscriptionQueryValidator(theDaoRegistry, theSubscriptionStrategyEvaluator); - } - @Bean SubscriptionTopicValidatingInterceptor subscriptionTopicValidatingInterceptor( FhirContext theFhirContext, SubscriptionQueryValidator theSubscriptionQueryValidator) { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImplTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImplTest.java index a0c6b7c4279..ecca07f2b98 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImplTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/binstore/DatabaseBinaryContentStorageSvcImplTest.java @@ -7,8 +7,12 @@ import ca.uhn.fhir.jpa.model.entity.BinaryStorageEntity; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; +import jakarta.persistence.EntityManager; +import org.hibernate.LobHelper; +import org.hibernate.Session; import org.hl7.fhir.r4.model.IdType; import 
org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; @@ -24,6 +28,7 @@ import java.sql.SQLException; import java.util.Optional; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.matchesPattern; @@ -34,6 +39,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -281,7 +287,7 @@ public class DatabaseBinaryContentStorageSvcImplTest extends BaseJpaR4Test { } @Test - public void testStoreBinaryContent_writesBlobAndByteArray() throws IOException { + public void testStoreBinaryContent_byDefault_writesByteArrayOnly() throws IOException { // given ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES); String contentType = "image/png"; @@ -296,11 +302,41 @@ public class DatabaseBinaryContentStorageSvcImplTest extends BaseJpaR4Test { // then assertThat(binaryStorageEntity.hasStorageContent(), is(true)); - assertThat(binaryStorageEntity.hasBlob(), is(true)); + assertThat(binaryStorageEntity.hasBlob(), is(false)); }); } + @Test + public void testStoreBinaryContent_whenSupportingLegacyBlobServer_willStoreToBlobAndBinaryArray() throws IOException { + ArgumentCaptor captor = ArgumentCaptor.forClass(BinaryStorageEntity.class); + EntityManager mockedEntityManager = mock(EntityManager.class); + Session mockedSession = mock(Session.class); + 
LobHelper mockedLobHelper = mock(LobHelper.class); + when(mockedEntityManager.getDelegate()).thenReturn(mockedSession); + when(mockedSession.getLobHelper()).thenReturn(mockedLobHelper); + when(mockedLobHelper.createBlob(any())).thenReturn(mock(Blob.class)); + + // given + DatabaseBinaryContentStorageSvcImpl svc = new DatabaseBinaryContentStorageSvcImpl() + .setSupportLegacyLobServer(true) + .setEntityManagerForTesting(mockedEntityManager); + + ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES); + String contentType = "image/png"; + IdType resourceId = new IdType("Binary/123"); + + // when + svc.storeBinaryContent(resourceId, null, contentType, inputStream, new ServletRequestDetails()); + + // then + verify(mockedEntityManager, times(1)).persist(captor.capture()); + BinaryStorageEntity capturedBinaryStorageEntity = captor.getValue(); + + assertThat(capturedBinaryStorageEntity.hasBlob(), equalTo(true)); + assertThat(capturedBinaryStorageEntity.hasStorageContent(), equalTo(true)); + } + @Configuration public static class MyConfig { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QuerySandbox.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QuerySandbox.java index 7efc920ccaf..95ab54ddb19 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QuerySandbox.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QuerySandbox.java @@ -9,8 +9,11 @@ import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig; import ca.uhn.fhir.jpa.test.config.TestR4Config; import ca.uhn.fhir.jpa.util.SqlQuery; import ca.uhn.fhir.jpa.util.SqlQueryList; +import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.storage.test.DaoTestDataBuilder; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; 
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -33,6 +36,8 @@ import java.util.List; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.not; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Sandbox for implementing queries. @@ -137,6 +142,28 @@ public class FhirResourceDaoR4QuerySandbox extends BaseJpaTest { myTestDaoSearch.assertSearchFindsInOrder("reverse sort by server assigned id", "Patient?family=smith&_sort=-_pid", id3,id2,id1); } + @Test + void testChainedSort() { + final IIdType practitionerId = myDataBuilder.createPractitioner(myDataBuilder.withFamily("Jones")); + + final String id1 = myDataBuilder.createPatient(myDataBuilder.withFamily("Smithy")).getIdPart(); + final String id2 = myDataBuilder.createPatient(myDataBuilder.withFamily("Smithwick")).getIdPart(); + final String id3 = myDataBuilder.createPatient( + myDataBuilder.withFamily("Smith"), + myDataBuilder.withReference("generalPractitioner", practitionerId)).getIdPart(); + + + final IBundleProvider iBundleProvider = myTestDaoSearch.searchForBundleProvider("Patient?_total=ACCURATE&_sort=Practitioner:general-practitioner.family"); + assertEquals(3, iBundleProvider.size()); + + final List allResources = iBundleProvider.getAllResources(); + assertEquals(3, iBundleProvider.size()); + assertEquals(3, allResources.size()); + + final List actualIds = allResources.stream().map(IBaseResource::getIdElement).map(IIdType::getIdPart).toList(); + assertTrue(actualIds.containsAll(List.of(id1, id2, id3))); + } + public static final class TestDirtiesContextTestExecutionListener extends DirtiesContextTestExecutionListener { @Override diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvcTest.java 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvcTest.java index 4f90f46bc41..53ff09b50c6 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvcTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvcTest.java @@ -1,74 +1,189 @@ package ca.uhn.fhir.jpa.partition; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.dao.data.IPartitionDao; import ca.uhn.fhir.jpa.entity.PartitionEntity; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.Patient; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.Set; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.when; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; @ExtendWith(MockitoExtension.class) -class RequestPartitionHelperSvcTest { - static final Integer PARTITION_ID = 2401; - static final String PARTITION_NAME = "JIMMY"; - static final PartitionEntity ourPartitionEntity = new PartitionEntity().setName(PARTITION_NAME); +class RequestPartitionHelperSvcTest extends BaseJpaR4Test { - @Mock + static final int PARTITION_ID_1 = 1; + static final String PARTITION_NAME_1 = 
"SOME-PARTITION-1"; + + static final int PARTITION_ID_2 = 2; + static final String PARTITION_NAME_2 = "SOME-PARTITION-2"; + + static final int UNKNOWN_PARTITION_ID = 1_000_000; + static final String UNKNOWN_PARTITION_NAME = "UNKNOWN"; + + @Autowired + IPartitionDao myPartitionDao; + @Autowired PartitionSettings myPartitionSettings; - @Mock - IPartitionLookupSvc myPartitionLookupSvc; - @Mock - FhirContext myFhirContext; - @Mock - IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired + RequestPartitionHelperSvc mySvc; - @InjectMocks - RequestPartitionHelperSvc mySvc = new RequestPartitionHelperSvc(); + Patient myPatient; - @Test - public void determineReadPartitionForSystemRequest() { - // setup - SystemRequestDetails srd = new SystemRequestDetails(); - RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionId(PARTITION_ID); - srd.setRequestPartitionId(requestPartitionId); - when(myPartitionSettings.isPartitioningEnabled()).thenReturn(true); - when(myPartitionLookupSvc.getPartitionById(PARTITION_ID)).thenReturn(ourPartitionEntity); + @BeforeEach + public void before(){ + myPartitionDao.deleteAll(); + myPartitionSettings.setPartitioningEnabled(true); - // execute - RequestPartitionId result = mySvc.determineReadPartitionForRequestForRead(srd, "Patient", new IdType("Patient/123")); - - // verify - assertEquals(PARTITION_ID, result.getFirstPartitionIdOrNull()); - assertEquals(PARTITION_NAME, result.getFirstPartitionNameOrNull()); + myPatient = new Patient(); + myPatient.setId(new IdType("Patient", "123", "1")); } @Test - public void determineCreatePartitionForSystemRequest() { + public void testDetermineReadPartitionForSystemRequest_withPartitionIdOnly_returnsCorrectPartition() { // setup + PartitionEntity partitionEntity = createPartition1(); SystemRequestDetails srd = new SystemRequestDetails(); - RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionId(PARTITION_ID); - srd.setRequestPartitionId(requestPartitionId); - 
when(myPartitionSettings.isPartitioningEnabled()).thenReturn(true); - when(myPartitionLookupSvc.getPartitionById(PARTITION_ID)).thenReturn(ourPartitionEntity); - Patient resource = new Patient(); - when(myFhirContext.getResourceType(resource)).thenReturn("Patient"); + srd.setRequestPartitionId(RequestPartitionId.fromPartitionId(partitionEntity.getId())); // execute - RequestPartitionId result = mySvc.determineCreatePartitionForRequest(srd, resource, "Patient"); + RequestPartitionId result = mySvc.determineReadPartitionForRequestForRead(srd, myPatient.fhirType(), myPatient.getIdElement()); // verify - assertEquals(PARTITION_ID, result.getFirstPartitionIdOrNull()); - assertEquals(PARTITION_NAME, result.getFirstPartitionNameOrNull()); + assertEquals(PARTITION_ID_1, result.getFirstPartitionIdOrNull()); + assertEquals(PARTITION_NAME_1, result.getFirstPartitionNameOrNull()); } + @Test + public void testDetermineCreatePartitionForRequest_withPartitionIdOnly_returnsCorrectPartition() { + // setup + PartitionEntity partitionEntity = createPartition1(); + SystemRequestDetails srd = new SystemRequestDetails(); + srd.setRequestPartitionId(RequestPartitionId.fromPartitionId(partitionEntity.getId())); + + // execute + Patient patient = new Patient(); + RequestPartitionId result = mySvc.determineCreatePartitionForRequest(srd, patient, patient.fhirType()); + + // verify + assertEquals(PARTITION_ID_1, result.getFirstPartitionIdOrNull()); + assertEquals(PARTITION_NAME_1, result.getFirstPartitionNameOrNull()); + } + + @Test + public void testValidateAndNormalizePartitionIds_withPartitionIdOnly_populatesPartitionName(){ + PartitionEntity partitionEntity = createPartition1(); + RequestPartitionId partitionId = RequestPartitionId.fromPartitionId(partitionEntity.getId()); + RequestPartitionId result = mySvc.validateAndNormalizePartitionIds(partitionId); + + assertEquals(PARTITION_ID_1, result.getFirstPartitionIdOrNull()); + assertEquals(PARTITION_NAME_1, 
result.getFirstPartitionNameOrNull()); + } + + @Test + public void testValidateAndNormalizePartitionIds_withUnknownId_throwsException(){ + RequestPartitionId partitionId = RequestPartitionId.fromPartitionId(UNKNOWN_PARTITION_ID); + + try{ + mySvc.validateAndNormalizePartitionIds(partitionId); + fail(); + } catch (ResourceNotFoundException e){ + assertTrue(e.getMessage().contains("No partition exists with ID 1,000,000")); + } + } + + @Test + public void testValidateAndNormalizePartitionIds_withIdAndInvalidName_throwsException(){ + createPartition1(); + RequestPartitionId partitionId = RequestPartitionId.fromPartitionIdAndName(PARTITION_ID_1, UNKNOWN_PARTITION_NAME); + + try{ + mySvc.validateAndNormalizePartitionIds(partitionId); + fail(); + } catch (IllegalArgumentException e){ + assertTrue(e.getMessage().contains("Partition name UNKNOWN does not match ID 1")); + } + } + + @Test + public void testValidateAndNormalizePartitionIds_withMultiplePartitionIdOnly_populatesPartitionNames(){ + PartitionEntity partitionEntity1 = createPartition1(); + PartitionEntity partitionEntity2 = createPartition2(); + + RequestPartitionId partitionId = RequestPartitionId.fromPartitionIds(partitionEntity1.getId(), partitionEntity2.getId()); + RequestPartitionId result = mySvc.validateAndNormalizePartitionIds(partitionId); + + assertTrue(result.getPartitionIds().containsAll(Set.of(PARTITION_ID_1, PARTITION_ID_2))); + assertNotNull(result.getPartitionNames()); + assertTrue(result.getPartitionNames().containsAll(Set.of(PARTITION_NAME_1, PARTITION_NAME_2))); + } + + @Test + public void testValidateAndNormalizePartitionNames_withPartitionNameOnly_populatesPartitionId(){ + PartitionEntity partitionEntity = createPartition1(); + RequestPartitionId partitionId = RequestPartitionId.fromPartitionName(partitionEntity.getName()); + RequestPartitionId result = mySvc.validateAndNormalizePartitionNames(partitionId); + + assertEquals(PARTITION_ID_1, result.getFirstPartitionIdOrNull()); + 
assertEquals(PARTITION_NAME_1, result.getFirstPartitionNameOrNull()); + } + + @Test + public void testValidateAndNormalizePartitionNames_withMultiplePartitionNamesOnly_populatesPartitionIds(){ + PartitionEntity partitionEntity1 = createPartition1(); + PartitionEntity partitionEntity2 = createPartition2(); + + RequestPartitionId partitionId = RequestPartitionId.fromPartitionNames(partitionEntity1.getName(), partitionEntity2.getName()); + RequestPartitionId result = mySvc.validateAndNormalizePartitionNames(partitionId); + + assertTrue(result.getPartitionIds().containsAll(Set.of(PARTITION_ID_1, PARTITION_ID_2))); + assertNotNull(result.getPartitionNames()); + assertTrue(result.getPartitionNames().containsAll(Set.of(PARTITION_NAME_1, PARTITION_NAME_2))); + } + + @Test + public void testValidateAndNormalizePartitionNames_withUnknownName_throwsException(){ + RequestPartitionId partitionId = RequestPartitionId.fromPartitionName(UNKNOWN_PARTITION_NAME); + + try{ + mySvc.validateAndNormalizePartitionNames(partitionId); + fail(); + } catch (ResourceNotFoundException e){ + assertTrue(e.getMessage().contains("Partition name \"UNKNOWN\" is not valid")); + } + } + + @Test + public void testValidateAndNormalizePartitionNames_withNameAndInvalidId_throwsException(){ + createPartition1(); + RequestPartitionId partitionId = RequestPartitionId.fromPartitionIdAndName(UNKNOWN_PARTITION_ID, PARTITION_NAME_1); + + try{ + mySvc.validateAndNormalizePartitionNames(partitionId); + fail(); + } catch (IllegalArgumentException e){ + assertTrue(e.getMessage().contains("Partition ID 1000000 does not match name SOME-PARTITION-1")); + } + } + + private PartitionEntity createPartition1() { + return myPartitionDao.save(new PartitionEntity().setId(PARTITION_ID_1).setName(PARTITION_NAME_1)); + } + + private PartitionEntity createPartition2() { + return myPartitionDao.save(new PartitionEntity().setId(PARTITION_ID_2).setName(PARTITION_NAME_2)); + } } diff --git 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java index dcce168fa09..c237dddcd6a 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java @@ -884,6 +884,16 @@ public class GiantTransactionPerfTest { return true; } + @Override + public RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId) { + return RequestPartitionId.defaultPartition(); + } + + @Override + public RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId) { + return RequestPartitionId.defaultPartition(); + } + } diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java index 5cf0e2a4127..50e2c2787c0 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao.r5; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.dao.TestDaoSearch; import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig; @@ -12,6 +13,8 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.util.BundleBuilder; import ca.uhn.fhir.util.HapiExtensions; +import 
org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r5.model.Composition; import org.hl7.fhir.r5.model.IdType; import org.hl7.fhir.r5.model.Bundle; @@ -29,6 +32,7 @@ import org.hl7.fhir.r5.model.SearchParameter; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import jakarta.annotation.Nonnull; @@ -41,9 +45,10 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; -@ContextConfiguration(classes = TestHSearchAddInConfig.NoFT.class) +@ContextConfiguration(classes = { TestHSearchAddInConfig.NoFT.class, TestDaoSearch.Config.class }) @SuppressWarnings({"Duplicates"}) public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { public static final String PRACTITIONER_PR1 = "Practitioner/PR1"; @@ -54,6 +59,8 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { public static final String ENCOUNTER_E2 = "Encounter/E2"; public static final String ENCOUNTER_E3 = "Encounter/E3"; public static final String ORGANIZATION_O1 = "Organization/O1"; + @Autowired + protected TestDaoSearch myTestDaoSearch; @Override @BeforeEach @@ -867,6 +874,47 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { assertEquals(1, countMatches(querySql, "HFJ_RES_LINK"), querySql); } + + @Test + void testChainedSortWithNulls() { + final IIdType practitionerId1 = createPractitioner(withFamily("Chan")); + final IIdType practitionerId2 = createPractitioner(withFamily("Jones")); + + final String id1 = 
createPatient(withFamily("Smithy")).getIdPart(); + final String id2 = createPatient(withFamily("Smithwick"), + withReference("generalPractitioner", practitionerId2)).getIdPart(); + final String id3 = createPatient( + withFamily("Smith"), + withReference("generalPractitioner", practitionerId1)).getIdPart(); + + + final IBundleProvider iBundleProvider = myTestDaoSearch.searchForBundleProvider("Patient?_total=ACCURATE&_sort=Practitioner:general-practitioner.family"); + final List allResources = iBundleProvider.getAllResources(); + assertEquals(3, iBundleProvider.size()); + assertEquals(3, allResources.size()); + + final List actualIds = allResources.stream().map(IBaseResource::getIdElement).map(IIdType::getIdPart).toList(); + assertTrue(actualIds.containsAll(List.of(id1, id2, id3))); + } + + @Test + void testChainedReverseStringSort() { + final IIdType practitionerId = createPractitioner(withFamily("Jones")); + + final String id1 = createPatient(withFamily("Smithy")).getIdPart(); + final String id2 = createPatient(withFamily("Smithwick")).getIdPart(); + final String id3 = createPatient( + withFamily("Smith"), + withReference("generalPractitioner", practitionerId)).getIdPart(); + + final IBundleProvider iBundleProvider = myTestDaoSearch.searchForBundleProvider("Patient?_total=ACCURATE&_sort=-Practitioner:general-practitioner.family"); + assertEquals(3, iBundleProvider.size()); + + final List allResources = iBundleProvider.getAllResources(); + final List actualIds = allResources.stream().map(IBaseResource::getIdElement).map(IIdType::getIdPart).toList(); + assertTrue(actualIds.containsAll(List.of(id3, id2, id1))); + } + /** * Observation:focus is a Reference(Any) so it can't be used in a sort chain because * this would be horribly, horribly inefficient. 
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/BaseDatabaseVerificationIT.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/BaseDatabaseVerificationIT.java index e010f25669a..beeef22ff91 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/BaseDatabaseVerificationIT.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/BaseDatabaseVerificationIT.java @@ -1,38 +1,24 @@ package ca.uhn.fhir.jpa.dao.r5.database; -import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider; -import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeProvider; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient; -import ca.uhn.fhir.jpa.api.dao.PatientEverythingParameters; +import ca.uhn.fhir.jpa.dao.TestDaoSearch; import ca.uhn.fhir.jpa.embedded.JpaEmbeddedDatabase; -import ca.uhn.fhir.jpa.fql.provider.HfqlRestProvider; -import ca.uhn.fhir.jpa.graphql.GraphQLProvider; import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc; import ca.uhn.fhir.jpa.migrate.MigrationTaskList; import ca.uhn.fhir.jpa.migrate.SchemaMigrator; import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao; import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks; -import ca.uhn.fhir.jpa.provider.DiffProvider; -import ca.uhn.fhir.jpa.provider.JpaCapabilityStatementProvider; -import ca.uhn.fhir.jpa.provider.ProcessMessageProvider; -import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider; -import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; -import ca.uhn.fhir.jpa.provider.ValueSetOperationProvider; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.test.BaseJpaTest; import 
ca.uhn.fhir.jpa.test.config.TestR5Config; -import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator; import ca.uhn.fhir.rest.api.EncodingEnum; -import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.SortSpec; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.client.api.IGenericClient; -import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; -import ca.uhn.fhir.rest.server.interceptor.CorsInterceptor; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; import ca.uhn.fhir.test.utilities.ITestDataBuilder; import ca.uhn.fhir.test.utilities.server.RestfulServerConfigurerExtension; @@ -44,7 +30,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r5.model.Bundle; import org.hl7.fhir.r5.model.IdType; -import org.hl7.fhir.r5.model.IntegerType; import org.hl7.fhir.r5.model.Parameters; import org.hl7.fhir.r5.model.Patient; import org.junit.jupiter.api.Test; @@ -55,7 +40,6 @@ import org.junit.jupiter.params.provider.ValueSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.data.envers.repository.support.EnversRevisionRepositoryFactoryBean; @@ -63,10 +47,8 @@ import org.springframework.data.jpa.repository.config.EnableJpaRepositories; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.web.cors.CorsConfiguration; import javax.sql.DataSource; -import java.util.Arrays; import java.util.HashSet; import 
java.util.List; import java.util.Properties; @@ -80,7 +62,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows; @ExtendWith(SpringExtension.class) @EnableJpaRepositories(repositoryFactoryBeanClass = EnversRevisionRepositoryFactoryBean.class) -@ContextConfiguration(classes = {BaseDatabaseVerificationIT.TestConfig.class}) +@ContextConfiguration(classes = {BaseDatabaseVerificationIT.TestConfig.class, TestDaoSearch.Config.class}) public abstract class BaseDatabaseVerificationIT extends BaseJpaTest implements ITestDataBuilder { private static final Logger ourLog = LoggerFactory.getLogger(BaseDatabaseVerificationIT.class); private static final String MIGRATION_TABLENAME = "MIGRATIONS"; @@ -109,6 +91,9 @@ public abstract class BaseDatabaseVerificationIT extends BaseJpaTest implements @Autowired private DatabaseBackedPagingProvider myPagingProvider; + @Autowired + TestDaoSearch myTestDaoSearch; + @RegisterExtension protected RestfulServerExtension myServer = new RestfulServerExtension(FhirContext.forR5Cached()); @@ -175,6 +160,20 @@ public abstract class BaseDatabaseVerificationIT extends BaseJpaTest implements assertThat(values.toString(), values, containsInAnyOrder(expectedIds.toArray(new String[0]))); } + @Test + void testChainedSort() { + // given + + // when + SearchParameterMap map = SearchParameterMap + .newSynchronous() + .setSort(new SortSpec("Practitioner:general-practitioner.family")); + myCaptureQueriesListener.clear(); + myPatientDao.search(map, mySrd); + + } + + @Configuration @@ -222,8 +221,8 @@ public abstract class BaseDatabaseVerificationIT extends BaseJpaTest implements } public static class JpaDatabaseContextConfigParamObject { - private JpaEmbeddedDatabase myJpaEmbeddedDatabase; - private String myDialect; + final JpaEmbeddedDatabase myJpaEmbeddedDatabase; + final String myDialect; public JpaDatabaseContextConfigParamObject(JpaEmbeddedDatabase theJpaEmbeddedDatabase, String theDialect) { myJpaEmbeddedDatabase = theJpaEmbeddedDatabase; diff 
--git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvcTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvcTest.java new file mode 100644 index 00000000000..ea05bc1ea9f --- /dev/null +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvcTest.java @@ -0,0 +1,57 @@ +package ca.uhn.fhir.jpa.term; + +import ca.uhn.fhir.jpa.dao.data.ITermConceptDao; +import ca.uhn.fhir.jpa.entity.TermConcept; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.ArgumentCaptor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +public class TermConceptDaoSvcTest { + + @Mock + private ITermConceptDao myConceptDao; + + @InjectMocks + private TermConceptDaoSvc myTermConceptDaoSvc; + + @ParameterizedTest + @ValueSource(booleans = {false, true}) + public void testSaveConcept_withSupportLegacyLob(boolean theSupportLegacyLob){ + final String parentPids = "1 2 3 4 5 6 7 8 9"; + when(myConceptDao.save(any())).thenAnswer(t ->{ + TermConcept codeSystem = (TermConcept) t.getArguments()[0]; + codeSystem.prePersist(); + + return codeSystem; + }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(TermConcept.class); + + // given + TermConcept termConcept = new TermConcept().setParentPids(parentPids); + + // when + myTermConceptDaoSvc.setSupportLegacyLob(theSupportLegacyLob); + myTermConceptDaoSvc.saveConcept(termConcept); + + // then + verify(myConceptDao, 
times(1)).save(captor.capture()); + TermConcept capturedTermConcept = captor.getValue(); + + assertThat(capturedTermConcept.hasParentPidsLobForTesting(), equalTo(theSupportLegacyLob)); + assertThat(capturedTermConcept.getParentPidsAsString(), equalTo(parentPids)); + } + +} diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorTest.java index 972a533ef2e..dbb455d6e87 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulatorTest.java @@ -9,11 +9,16 @@ import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import java.util.Optional; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.times; @@ -74,4 +79,22 @@ public class ValueSetConceptAccumulatorTest { } + @ParameterizedTest + @ValueSource(booleans = {false, true}) + public void testPersistValueSetConcept_whenSupportLegacyLob(boolean theSupportLegacyLob){ + final String sourceConceptDirectParentPids = "1 2 3 4 5 6 7"; + ArgumentCaptor captor = ArgumentCaptor.forClass(TermValueSetConcept.class); + + myAccumulator.setSupportLegacyLob(theSupportLegacyLob); + myAccumulator.includeConcept("sys", "code", "display", null, sourceConceptDirectParentPids, null); + + verify(myValueSetConceptDao, 
times(1)).save(captor.capture()); + + TermValueSetConcept capturedTermValueSetConcept = captor.getValue(); + + assertThat(capturedTermValueSetConcept.hasSourceConceptDirectParentPidsLob(), equalTo(theSupportLegacyLob)); + assertThat(capturedTermValueSetConcept.getSourceConceptDirectParentPids(), equalTo(sourceConceptDirectParentPids)); + + } + } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSurvivorshipSvcImpl.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSurvivorshipSvcImpl.java index 27d89757a9c..403db4605b6 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSurvivorshipSvcImpl.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSurvivorshipSvcImpl.java @@ -31,17 +31,22 @@ import ca.uhn.fhir.mdm.api.params.MdmQuerySearchParameters; import ca.uhn.fhir.mdm.model.MdmTransactionContext; import ca.uhn.fhir.mdm.model.mdmevents.MdmLinkJson; import ca.uhn.fhir.mdm.util.GoldenResourceHelper; +import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.util.TerserUtil; +import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.data.domain.Page; +import java.util.regex.Pattern; import java.util.stream.Stream; public class MdmSurvivorshipSvcImpl implements IMdmSurvivorshipService { + private static final Pattern IS_UUID = + Pattern.compile("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"); protected final FhirContext myFhirContext; @@ -133,16 +138,30 @@ public class MdmSurvivorshipSvcImpl implements IMdmSurvivorshipService { String sourceId = link.getSourceId(); // +1 because of "/" in id: "ResourceType/Id" - IResourcePersistentId pid = getResourcePID(sourceId.substring(resourceType.length() + 1), 
resourceType); + final String sourceIdUnqualified = sourceId.substring(resourceType.length() + 1); - // this might be a bit unperformant - // but it depends how many links there are - // per golden resource (unlikely to be thousands) - return dao.readByPid(pid); + // myMdmLinkQuerySvc.queryLinks populates sourceId with the FHIR_ID, not the RES_ID, so if we don't + // add this conditional logic, on JPA, myIIdHelperService.newPidFromStringIdAndResourceName will fail with + // NumberFormatException + if (isNumericOrUuid(sourceIdUnqualified)) { + IResourcePersistentId pid = getResourcePID(sourceIdUnqualified, resourceType); + + // this might be a bit unperformant + // but it depends how many links there are + // per golden resource (unlikely to be thousands) + return dao.readByPid(pid); + } else { + return dao.read(new IdDt(sourceId), new SystemRequestDetails()); + } }); } private IResourcePersistentId getResourcePID(String theId, String theResourceType) { return myIIdHelperService.newPidFromStringIdAndResourceName(theId, theResourceType); } + + private boolean isNumericOrUuid(String theLongCandidate) { + return StringUtils.isNumeric(theLongCandidate) + || IS_UUID.matcher(theLongCandidate).matches(); + } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamed.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamed.java index 04b897e8a08..ffb08829bfe 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamed.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamed.java @@ -22,6 +22,8 @@ package ca.uhn.fhir.rest.server.interceptor.auth; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; +import java.util.Collection; + public interface IAuthRuleBuilderOperationNamed { /** @@ -44,6 +46,8 @@ public 
interface IAuthRuleBuilderOperationNamed { */ IAuthRuleBuilderOperationNamedAndScoped onInstance(IIdType theInstanceId); + IAuthRuleBuilderOperationNamedAndScoped onInstances(Collection theInstanceIds); + /** * Rule applies to invocations of this operation at the instance level on any instance of the given type */ diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExport.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExport.java index 8eb40512001..f45ab580540 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExport.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExport.java @@ -22,6 +22,9 @@ package ca.uhn.fhir.rest.server.interceptor.auth; import jakarta.annotation.Nonnull; import org.hl7.fhir.instance.model.api.IIdType; +import java.util.Collection; +import java.util.stream.Collectors; + /** * @since 5.5.0 */ @@ -54,10 +57,20 @@ public interface IAuthRuleBuilderRuleBulkExport { IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnPatient(@Nonnull String theFocusResourceId); + IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnAllPatients(); + default IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnPatient(@Nonnull IIdType theFocusResourceId) { return patientExportOnPatient(theFocusResourceId.getValue()); } + default IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnPatients( + @Nonnull Collection theFocusResourceIds) { + return patientExportOnPatientStrings( + theFocusResourceIds.stream().map(IIdType::getValue).collect(Collectors.toList())); + } + + IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnPatientStrings(Collection theFocusResourceIds); + /** * Allow/deny patient-level export rule applies to the Group with the given resource ID, e.g. 
Group/123 * diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilder.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilder.java index 9316cd89d35..a98e2f1e32a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilder.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilder.java @@ -736,6 +736,20 @@ public class RuleBuilder implements IAuthRuleBuilder { return new RuleBuilderOperationNamedAndScoped(rule); } + @Override + public IAuthRuleBuilderOperationNamedAndScoped onInstances(Collection theInstanceIds) { + Validate.notNull(theInstanceIds, "theInstanceIds must not be null"); + theInstanceIds.forEach(instanceId -> Validate.notBlank( + instanceId.getResourceType(), + "at least one of theInstanceIds does not have a resource type")); + theInstanceIds.forEach(instanceId -> Validate.notBlank( + instanceId.getIdPart(), "at least one of theInstanceIds does not have an ID part")); + + final OperationRule rule = createRule(); + rule.appliesToInstances(new ArrayList<>(theInstanceIds)); + return new RuleBuilderOperationNamedAndScoped(rule); + } + @Override public IAuthRuleBuilderOperationNamedAndScoped onInstancesOfType( Class theType) { @@ -869,7 +883,7 @@ public class RuleBuilder implements IAuthRuleBuilder { } private class RuleBuilderBulkExport implements IAuthRuleBuilderRuleBulkExport { - private RuleBulkExportImpl ruleBulkExport; + private RuleBulkExportImpl myRuleBulkExport; @Override public IAuthRuleBuilderRuleBulkExportWithTarget groupExportOnGroup(@Nonnull String theFocusResourceId) { @@ -881,23 +895,60 @@ public class RuleBuilder implements IAuthRuleBuilder { return new RuleBuilderBulkExportWithTarget(rule); } + @Override + public IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnAllPatients() { + if (myRuleBulkExport == null) { + RuleBulkExportImpl rule = new RuleBulkExportImpl(myRuleName); + 
rule.setMode(myRuleMode); + myRuleBulkExport = rule; + } + myRuleBulkExport.setAppliesToPatientExportAllPatients(); + + // prevent duplicate rules being added + if (!myRules.contains(myRuleBulkExport)) { + myRules.add(myRuleBulkExport); + } + + return new RuleBuilderBulkExportWithTarget(myRuleBulkExport); + } + @Override public IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnPatient(@Nonnull String theFocusResourceId) { - if (ruleBulkExport == null) { + if (myRuleBulkExport == null) { RuleBulkExportImpl rule = new RuleBulkExportImpl(myRuleName); rule.setAppliesToPatientExport(theFocusResourceId); rule.setMode(myRuleMode); - ruleBulkExport = rule; + myRuleBulkExport = rule; } else { - ruleBulkExport.setAppliesToPatientExport(theFocusResourceId); + myRuleBulkExport.setAppliesToPatientExport(theFocusResourceId); } // prevent duplicate rules being added - if (!myRules.contains(ruleBulkExport)) { - myRules.add(ruleBulkExport); + if (!myRules.contains(myRuleBulkExport)) { + myRules.add(myRuleBulkExport); } - return new RuleBuilderBulkExportWithTarget(ruleBulkExport); + return new RuleBuilderBulkExportWithTarget(myRuleBulkExport); + } + + @Override + public IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnPatientStrings( + @Nonnull Collection theFocusResourceIds) { + if (myRuleBulkExport == null) { + RuleBulkExportImpl rule = new RuleBulkExportImpl(myRuleName); + rule.setAppliesToPatientExport(theFocusResourceIds); + rule.setMode(myRuleMode); + myRuleBulkExport = rule; + } else { + myRuleBulkExport.setAppliesToPatientExport(theFocusResourceIds); + } + + // prevent duplicate rules being added + if (!myRules.contains(myRuleBulkExport)) { + myRules.add(myRuleBulkExport); + } + + return new RuleBuilderBulkExportWithTarget(myRuleBulkExport); } @Override diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java index 
1686c157032..eadb0ee4c99 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java @@ -24,12 +24,12 @@ import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; +import com.google.common.annotations.VisibleForTesting; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import java.util.ArrayList; import java.util.Collection; -import java.util.List; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; @@ -42,6 +42,7 @@ public class RuleBulkExportImpl extends BaseRule { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RuleBulkExportImpl.class); private String myGroupId; private final Collection myPatientIds; + private boolean myAppliesToAllPatients; private BulkExportJobParameters.ExportStyle myWantExportStyle; private Collection myResourceTypes; private boolean myWantAnyStyle; @@ -69,115 +70,86 @@ public class RuleBulkExportImpl extends BaseRule { return null; } - BulkExportJobParameters options = (BulkExportJobParameters) + BulkExportJobParameters inboundBulkExportRequestOptions = (BulkExportJobParameters) theRequestDetails.getAttribute(AuthorizationInterceptor.REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS); - - if (!myWantAnyStyle && options.getExportStyle() != myWantExportStyle) { + // if style doesn't match - abstain + if (!myWantAnyStyle && inboundBulkExportRequestOptions.getExportStyle() != myWantExportStyle) { return null; } + // Do we only authorize some types? 
If so, make sure requested types are a subset if (isNotEmpty(myResourceTypes)) { - if (isEmpty(options.getResourceTypes())) { - return null; + if (isEmpty(inboundBulkExportRequestOptions.getResourceTypes())) { + return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY, this); } - for (String next : options.getResourceTypes()) { - if (!myResourceTypes.contains(next)) { + if (!myResourceTypes.containsAll(inboundBulkExportRequestOptions.getResourceTypes())) { + return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY, this); + } + } + + // system only supports filtering by resource type. So if we are system, or any(), then allow, since we have + // done resource type checking + // above + AuthorizationInterceptor.Verdict allowVerdict = newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); + + if (myWantAnyStyle || myWantExportStyle == BulkExportJobParameters.ExportStyle.SYSTEM) { + return allowVerdict; + } + + // assume myGroupId not empty->myStyle is group. If target group matches, then allow. + if (isNotBlank(myGroupId) && inboundBulkExportRequestOptions.getGroupId() != null) { + String expectedGroupId = + new IdDt(myGroupId).toUnqualifiedVersionless().getValue(); + String actualGroupId = new IdDt(inboundBulkExportRequestOptions.getGroupId()) + .toUnqualifiedVersionless() + .getValue(); + if (Objects.equals(expectedGroupId, actualGroupId)) { + return allowVerdict; + } + } + // patient export mode - instance or type. type can have 0..n patient ids. + // myPatientIds == the rules built by the auth interceptor rule builder + // options.getPatientIds() == the requested IDs in the export job. + + // 1. If each of the requested resource IDs in the parameters are present in the users permissions, Approve + // 2. If any requested ID is not present in the users permissions, Deny. 
+ if (myWantExportStyle == BulkExportJobParameters.ExportStyle.PATIENT) + // Unfiltered Type Level + if (myAppliesToAllPatients) { + return allowVerdict; + } + + // Instance level, or filtered type level + if (isNotEmpty(myPatientIds)) { + // If bulk export options defines no patient IDs, return null. + if (inboundBulkExportRequestOptions.getPatientIds().isEmpty()) { + return null; + } else { + ourLog.debug("options.getPatientIds() != null"); + Set requestedPatientIds = sanitizeIds(inboundBulkExportRequestOptions.getPatientIds()); + Set permittedPatientIds = sanitizeIds(myPatientIds); + if (permittedPatientIds.containsAll(requestedPatientIds)) { + return allowVerdict; + } else { return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY, this); } } } - - if (myWantAnyStyle || myWantExportStyle == BulkExportJobParameters.ExportStyle.SYSTEM) { - return newVerdict( - theOperation, - theRequestDetails, - theInputResource, - theInputResourceId, - theOutputResource, - theRuleApplier); - } - - if (isNotBlank(myGroupId) && options.getGroupId() != null) { - String expectedGroupId = - new IdDt(myGroupId).toUnqualifiedVersionless().getValue(); - String actualGroupId = - new IdDt(options.getGroupId()).toUnqualifiedVersionless().getValue(); - if (Objects.equals(expectedGroupId, actualGroupId)) { - return newVerdict( - theOperation, - theRequestDetails, - theInputResource, - theInputResourceId, - theOutputResource, - theRuleApplier); - } - } - - // 1. If each of the requested resource IDs in the parameters are present in the users permissions, Approve - // 2. If any requested ID is not present in the users permissions, Deny. 
- if (myWantExportStyle == BulkExportJobParameters.ExportStyle.PATIENT && isNotEmpty(myPatientIds)) { - List permittedPatientIds = myPatientIds.stream() - .map(id -> new IdDt(id).toUnqualifiedVersionless().getValue()) - .collect(Collectors.toList()); - if (!options.getPatientIds().isEmpty()) { - ourLog.debug("options.getPatientIds() != null"); - List requestedPatientIds = options.getPatientIds().stream() - .map(t -> new IdDt(t).toUnqualifiedVersionless().getValue()) - .collect(Collectors.toList()); - boolean requestedPatientsPermitted = true; - for (String requestedPatientId : requestedPatientIds) { - if (!permittedPatientIds.contains(requestedPatientId)) { - requestedPatientsPermitted = false; - break; - } - } - if (requestedPatientsPermitted) { - return newVerdict( - theOperation, - theRequestDetails, - theInputResource, - theInputResourceId, - theOutputResource, - theRuleApplier); - } - - return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY, this); - } - - final List filters = options.getFilters(); - - if (!filters.isEmpty()) { - ourLog.debug("filters not empty"); - final Set patientIdsInFilters = filters.stream() - .filter(filter -> filter.startsWith("Patient?_id=")) - .map(filter -> filter.replace("?_id=", "/")) - .collect(Collectors.toUnmodifiableSet()); - - boolean filteredPatientIdsPermitted = true; - for (String patientIdInFilters : patientIdsInFilters) { - if (!permittedPatientIds.contains(patientIdInFilters)) { - filteredPatientIdsPermitted = false; - break; - } - } - - if (filteredPatientIdsPermitted) { - return newVerdict( - theOperation, - theRequestDetails, - theInputResource, - theInputResourceId, - theOutputResource, - theRuleApplier); - } - - return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY, this); - } - ourLog.debug("patientIds and filters both empty"); - } return null; } + private Set sanitizeIds(Collection myPatientIds) { + return myPatientIds.stream() + .map(id -> new IdDt(id).toUnqualifiedVersionless().getValue()) + 
.collect(Collectors.toSet()); + } + public void setAppliesToGroupExportOnGroup(String theGroupId) { myWantExportStyle = BulkExportJobParameters.ExportStyle.GROUP; myGroupId = theGroupId; @@ -193,6 +165,16 @@ public class RuleBulkExportImpl extends BaseRule { myPatientIds.add(thePatientId); } + public void setAppliesToPatientExport(Collection thePatientIds) { + myWantExportStyle = BulkExportJobParameters.ExportStyle.PATIENT; + myPatientIds.addAll(thePatientIds); + } + + public void setAppliesToPatientExportAllPatients() { + myWantExportStyle = BulkExportJobParameters.ExportStyle.PATIENT; + myAppliesToAllPatients = true; + } + public void setAppliesToSystem() { myWantExportStyle = BulkExportJobParameters.ExportStyle.SYSTEM; } @@ -212,4 +194,14 @@ public class RuleBulkExportImpl extends BaseRule { BulkExportJobParameters.ExportStyle getWantExportStyle() { return myWantExportStyle; } + + @VisibleForTesting + Collection getPatientIds() { + return myPatientIds; + } + + @VisibleForTesting + Collection getResourceTypes() { + return myResourceTypes; + } } diff --git a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilderTest.java b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilderTest.java index b942fcc8023..7dd962d6630 100644 --- a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilderTest.java +++ b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilderTest.java @@ -1,16 +1,23 @@ package ca.uhn.fhir.rest.server.interceptor.auth; import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; import com.google.common.collect.Lists; +import org.hl7.fhir.instance.model.api.IIdType; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import java.util.ArrayList; +import 
java.util.Collection; import java.util.List; +import java.util.stream.Stream; import static org.hamcrest.Matchers.contains; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.mockito.Mockito.mock; public class RuleBuilderTest { @@ -98,7 +105,72 @@ public class RuleBuilderTest { builder.allow().bulkExport().patientExportOnPatient("Patient/pat2").withResourceTypes(resourceTypes); List rules = builder.build(); assertEquals(rules.size(),1); - assertTrue(rules.get(0) instanceof RuleBulkExportImpl); + assertInstanceOf(RuleBulkExportImpl.class, rules.get(0)); + } + + public static Stream multipleInstancesParams() { + return Stream.of( + Arguments.of(List.of("Patient/pat1"), List.of("Patient"), PolicyEnum.ALLOW), + Arguments.of(List.of("Patient/pat1", "Patient/pat2"), List.of("Patient"), PolicyEnum.ALLOW), + Arguments.of(List.of("Patient/pat1", "Patient/pat2"), List.of("Patient", "Observation"), PolicyEnum.ALLOW), + Arguments.of(List.of("Patient/pat1"), List.of("Patient"), PolicyEnum.DENY), + Arguments.of(List.of("Patient/pat1", "Patient/pat2"), List.of("Patient"), PolicyEnum.DENY), + Arguments.of(List.of("Patient/pat1", "Patient/pat2"), List.of("Patient", "Observation"), PolicyEnum.DENY) + ); + } + + @ParameterizedTest + @MethodSource("multipleInstancesParams") + public void testBulkExport_PatientExportOnPatients_MultiplePatientsSingleRule(Collection theExpectedPatientIds, Collection theExpectedResourceTypes, PolicyEnum thePolicyEnum) { + final RuleBuilder builder = new RuleBuilder(); + final IAuthRuleBuilderRule rule = switch (thePolicyEnum) { + case ALLOW -> builder.allow(); + case DENY -> builder.deny(); + }; + rule.bulkExport().patientExportOnPatientStrings(theExpectedPatientIds).withResourceTypes(theExpectedResourceTypes); + final List rules = builder.build(); + 
assertEquals(rules.size(),1); + final IAuthRule authRule = rules.get(0); + assertInstanceOf(RuleBulkExportImpl.class, authRule); + final RuleBulkExportImpl ruleBulkExport = (RuleBulkExportImpl) authRule; + assertEquals(theExpectedPatientIds, ruleBulkExport.getPatientIds()); + assertEquals(theExpectedResourceTypes, ruleBulkExport.getResourceTypes()); + assertEquals(thePolicyEnum, ruleBulkExport.getMode()); + } + + public static Stream owners() { + return Stream.of( + Arguments.of(List.of(new IdDt("Patient/pat1")), PolicyEnum.ALLOW), + Arguments.of(List.of(new IdDt("Patient/pat1")), PolicyEnum.DENY), + Arguments.of(List.of(new IdDt("Patient/pat1"), new IdDt("Patient/pat2")), PolicyEnum.ALLOW), + Arguments.of(List.of(new IdDt("Patient/pat1"), new IdDt("Patient/pat2")), PolicyEnum.DENY) + ); + } + + @ParameterizedTest + @MethodSource("owners") + public void testBulkExport_PatientExportOnPatients_onInstances(List theExpectedOwners, PolicyEnum thePolicyEnum) { + final RuleBuilder builder = new RuleBuilder(); + final IAuthRuleBuilderRule rule = switch (thePolicyEnum) { + case ALLOW -> builder.allow(); + case DENY -> builder.deny(); + }; + + final List rules = rule + .operation() + .named(ProviderConstants.OPERATION_EXPORT) + .onInstances(theExpectedOwners) + .andAllowAllResponses() + .andThen() + .build(); + + assertEquals(rules.size(),1); + final IAuthRule authRule = rules.get(0); + assertInstanceOf(OperationRule.class, authRule); + final OperationRule operationRule = (OperationRule) authRule; + assertEquals(theExpectedOwners, operationRule.getAppliesToIds()); + assertEquals(ProviderConstants.OPERATION_EXPORT, operationRule.getOperationName()); + assertEquals(thePolicyEnum, operationRule.getMode()); } @Test diff --git a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImplTest.java b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImplTest.java index dc71422f128..1dc8eb5b8e8 100644 --- 
a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImplTest.java +++ b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImplTest.java @@ -4,6 +4,9 @@ import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -13,7 +16,6 @@ import java.util.HashSet; import java.util.Set; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.when; @@ -28,10 +30,11 @@ public class RuleBulkExportImplTest { @Mock private Set myFlags; + @Test public void testDenyBulkRequestWithInvalidResourcesTypes() { RuleBulkExportImpl myRule = new RuleBulkExportImpl("a"); - + myRule.setMode(PolicyEnum.ALLOW); Set myTypes = new HashSet<>(); myTypes.add("Patient"); myTypes.add("Practitioner"); @@ -42,33 +45,210 @@ public class RuleBulkExportImplTest { BulkExportJobParameters options = new BulkExportJobParameters(); options.setResourceTypes(myWantTypes); - + when(myRequestDetails.getAttribute(any())).thenReturn(options); AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - assertEquals(PolicyEnum.DENY, verdict.getDecision()); + assertDeny(verdict); + } + + + @Test + public void test_RuleSpecifiesResourceTypes_RequestDoesNot_Abstains() { + RuleBulkExportImpl myRule = new RuleBulkExportImpl("a"); + myRule.setAppliesToPatientExportAllPatients(); + myRule.setMode(PolicyEnum.ALLOW); + Set myTypes = new HashSet<>(); + myTypes.add("Patient"); + 
myRule.setResourceTypes(myTypes); + + + BulkExportJobParameters options = new BulkExportJobParameters(); + options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); + when(myRequestDetails.getAttribute(any())).thenReturn(options); + + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + assertDeny(verdict); } @Test - public void testBulkRequestWithValidResourcesTypes() { + public void testBulkExportSystem_ruleHasTypes_RequestWithTypes_allow() { RuleBulkExportImpl myRule = new RuleBulkExportImpl("a"); - - Set myTypes = new HashSet<>(); - myTypes.add("Patient"); - myTypes.add("Practitioner"); - myRule.setResourceTypes(myTypes); - - Set myWantTypes = new HashSet<>(); - myWantTypes.add("Patient"); - myWantTypes.add("Practitioner"); + myRule.setMode(PolicyEnum.ALLOW); + myRule.setAppliesToSystem(); + myRule.setResourceTypes(Set.of("Patient", "Practitioner")); BulkExportJobParameters options = new BulkExportJobParameters(); - options.setResourceTypes(myWantTypes); + options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM); + options.setResourceTypes(Set.of("Patient", "Practitioner")); when(myRequestDetails.getAttribute(any())).thenReturn(options); AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - assertNull(verdict); + + assertAllow(verdict); + } + + + @Test + public void testBulkExportSystem_ruleHasTypes_RequestWithTooManyTypes_abstain() { + RuleBulkExportImpl myRule = new RuleBulkExportImpl("a"); + myRule.setMode(PolicyEnum.ALLOW); + myRule.setAppliesToSystem(); + myRule.setResourceTypes(Set.of("Patient", "Practitioner")); + + + BulkExportJobParameters options = new BulkExportJobParameters(); + options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM); + options.setResourceTypes(Set.of("Patient", "Practitioner", "Encounter")); + + 
when(myRequestDetails.getAttribute(any())).thenReturn(options); + + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertDeny(verdict); + } + @Nested + class StyleChecks { + BulkExportJobParameters myOptions = new BulkExportJobParameters(); + RuleBulkExportImpl myRule = new RuleBulkExportImpl("a"); + @BeforeEach + void setUp() { + myRule.setMode(PolicyEnum.ALLOW); + when(myRequestDetails.getAttribute(any())).thenReturn(myOptions); + } + @Nested class RuleAnyStyle { + @BeforeEach + void setUp(){ + myRule.setAppliesToAny(); + } + + @Test + public void testRuleAnyStyle_Matches_SystemStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM); + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAllow(verdict); + } + @Test + public void testRuleAnyStyle_Matches_PatientStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAllow(verdict); + + } + @Test + public void testRuleAnyStyle_Matches_GroupStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP); + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAllow(verdict); + + } + + } + + @Nested + class RuleSystemStyle { + @BeforeEach + void setUp() { + myRule.setAppliesToSystem(); + } + + @Test + public void test_Matches_SystemStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM); + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAllow(verdict); + } + + 
@Test + public void test_DoesntMatch_GroupStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP); + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAbstain(verdict); + } + + @Test + public void test_DoesntMatch_PatientStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAbstain(verdict); + } + } + + @Nested + class RuleGroupStyle { + @BeforeEach + void setUp() { + myRule.setAppliesToGroupExportOnGroup("Group/123"); + } + + @Test + public void test_DoesntMatch_SystemStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM); + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAbstain(verdict); + } + + @Test + public void test_Matches_GroupStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP); + myOptions.setGroupId("Group/123"); + + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAllow(verdict); + } + + @Test + public void test_DoesntMatch_PatientStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAbstain(verdict); + } + } + + @Nested + class RulePatientStyle { + @BeforeEach + void setUp() { + myRule.setAppliesToPatientExport("Patient/123"); + } + + @Test + public void test_DoesntMatch_SystemStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM); + AuthorizationInterceptor.Verdict 
verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAbstain(verdict); + } + + @Test + public void test_DoesntMatch_GroupStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP); + myOptions.setGroupId("Group/123"); + + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAbstain(verdict); + } + + @Test + public void test_DoesntMatch_PatientStyle() { + myOptions.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); + myOptions.setPatientIds(Set.of("Patient/123")); + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAllow(verdict); + } + } } @Test @@ -84,7 +264,7 @@ public class RuleBulkExportImplTest { when(myRequestDetails.getAttribute(any())).thenReturn(options); AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - assertEquals(null, verdict); + assertAbstain(verdict); } @Test @@ -100,7 +280,7 @@ public class RuleBulkExportImplTest { when(myRequestDetails.getAttribute(any())).thenReturn(options); AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - assertEquals(PolicyEnum.ALLOW, verdict.getDecision()); + assertAllow(verdict); } @Test @@ -118,7 +298,7 @@ public class RuleBulkExportImplTest { AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); //Then: We permit the request, as a patient ID that was requested is honoured by this rule. 
- assertEquals(PolicyEnum.ALLOW, verdict.getDecision()); + assertAllow(verdict); } @Test @@ -135,8 +315,8 @@ public class RuleBulkExportImplTest { //When AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - //Then: we should deny the request, as the requested export does not contain the patient permitted. - assertEquals(PolicyEnum.DENY, verdict.getDecision()); + //Then: deny, as the requested export contains a patient that is not permitted + assertDeny(verdict); } @Test @@ -153,28 +333,45 @@ public class RuleBulkExportImplTest { AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); //Then: We make no claims about type-level export on Patient. - assertEquals(null, verdict); + assertAbstain(verdict); } @Test - public void testPatientExportRulesOnTypeLevelExportWithTypeFilterResourceTypePatient() { + public void testPatientExportRulesWithId_withRequestNoIds_abstains() { //Given - final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); + RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); myRule.setAppliesToPatientExport("Patient/123"); myRule.setMode(PolicyEnum.ALLOW); - final BulkExportJobParameters options = new BulkExportJobParameters(); + BulkExportJobParameters options = new BulkExportJobParameters(); + options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); - options.setFilters(Set.of("Patient?_id=123")); - options.setResourceTypes(Set.of("Patient")); when(myRequestDetails.getAttribute(any())).thenReturn(options); //When - final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - //Then: The patient IDs match so this is permitted - assertEquals(PolicyEnum.ALLOW, 
verdict.getDecision()); + //Then: We make no claims about type-level export on Patient. + assertAbstain(verdict); } + @Test + public void testPatientExportRuleWithNoIds_withRequestNoIds_allows() { + //Given + RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); + myRule.setAppliesToPatientExportAllPatients(); + myRule.setMode(PolicyEnum.ALLOW); + BulkExportJobParameters options = new BulkExportJobParameters(); + + options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); + when(myRequestDetails.getAttribute(any())).thenReturn(options); + + //When + AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + assertAllow(verdict); + } + + @Test public void testPatientExportRulesOnTypeLevelExportWithTypeFilterResourceTypePatientAndFilterHasResources() { //Given @@ -191,27 +388,9 @@ public class RuleBulkExportImplTest { final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); //Then: The patient IDs match so this is permitted - assertEquals(PolicyEnum.ALLOW, verdict.getDecision()); + assertAbstain(verdict); } - @Test - public void testPatientExportRulesOnTypeLevelExportWithTypeFilterResourceTypeObservation() { - //Given - final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); - myRule.setAppliesToPatientExport("Patient/123"); - myRule.setMode(PolicyEnum.ALLOW); - final BulkExportJobParameters options = new BulkExportJobParameters(); - options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); - options.setFilters(Set.of("Patient?_id=123")); - options.setResourceTypes(Set.of("Observation")); - when(myRequestDetails.getAttribute(any())).thenReturn(options); - - //When - final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - - //Then: The patient IDs match so 
this is permitted - assertEquals(PolicyEnum.ALLOW, verdict.getDecision()); - } @Test public void testPatientExportRulesOnTypeLevelExportWithTypeFilterNoResourceType() { @@ -227,27 +406,8 @@ public class RuleBulkExportImplTest { //When final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - //Then: The patient IDs match so this is permitted - assertEquals(PolicyEnum.ALLOW, verdict.getDecision()); - } - - @Test - public void testPatientExportRulesOnTypeLevelExportWithTypeFilterMismatch() { - //Given - final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); - myRule.setAppliesToPatientExport("Patient/123"); - myRule.setMode(PolicyEnum.ALLOW); - final BulkExportJobParameters options = new BulkExportJobParameters(); - options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); - options.setFilters(Set.of("Patient?_id=456")); - options.setResourceTypes(Set.of("Patient")); - when(myRequestDetails.getAttribute(any())).thenReturn(options); - - //When - final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - - //Then: The patient IDs do NOT match so this is not permitted. - assertEquals(PolicyEnum.DENY, verdict.getDecision()); + //Then: Filters are ignored for auth purposes. The rule has an ID, indicating it is for instance level, but the job requested type level. Abstain + assertAbstain(verdict); } @Test @@ -265,12 +425,12 @@ public class RuleBulkExportImplTest { //When final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - //Then: We do not have permissions on the requested patient so this is not permitted. 
- assertEquals(PolicyEnum.DENY, verdict.getDecision()); + //Then: We do not have permissions on the requested patient so we deny + assertDeny(verdict); } @Test - public void testPatientExportRulesOnTypeLevelExportPermittedPatient() { + public void testPatientExport_ruleAllowsId_requestsId_allow() { //Given final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); myRule.setAppliesToPatientExport("Patient/123"); @@ -289,7 +449,7 @@ } @Test - public void testPatientExportRulesOnTypeLevelExportPermittedPatients() { + public void testPatientExport_ruleAllowsIds_requestsIds_allow() { //Given final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); myRule.setAppliesToPatientExport("Patient/123"); @@ -309,7 +469,7 @@ } @Test - public void testPatientExportRulesOnTypeLevelExportWithPermittedAndUnpermittedPatients() { + public void testPatientExport_ruleAllowsId_requestsTooManyIds_abstain() { //Given final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); myRule.setAppliesToPatientExport("Patient/123"); @@ -324,8 +484,61 @@ final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); //Then: There are unpermitted patients in the request so this is not permitted. 
- assertEquals(PolicyEnum.DENY, verdict.getDecision()); + assertDeny(verdict); + } // + + @Test + public void testPatientExport_RuleAllowsAll_RequestId_allows() { + final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); + myRule.setAppliesToPatientExportAllPatients(); + myRule.setMode(PolicyEnum.ALLOW); + + final BulkExportJobParameters options = new BulkExportJobParameters(); + options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); + options.setPatientIds(Set.of("Patient/123")); + when(myRequestDetails.getAttribute(any())).thenReturn(options); + + //When + final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + //Then + assertAllow(verdict); } + + @Test + public void testPatientExport_RuleAllowsAll_RequestAll_allows() { + final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); + myRule.setAppliesToPatientExportAllPatients(); + myRule.setMode(PolicyEnum.ALLOW); + + final BulkExportJobParameters options = new BulkExportJobParameters(); + options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); + when(myRequestDetails.getAttribute(any())).thenReturn(options); + + //When + final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); + + //Then + assertAllow(verdict); + } + + @Test + public void testPatientExport_RuleAllowsExplicitPatient_RequestAll_abstain() { + final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b"); + myRule.setAppliesToPatientExport("Patient/123"); + myRule.setMode(PolicyEnum.ALLOW); + + final BulkExportJobParameters options = new BulkExportJobParameters(); + options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT); + when(myRequestDetails.getAttribute(any())).thenReturn(options); + + //When + final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, 
null, myRuleApplier, myFlags, myPointcut); + + //Then + assertAbstain(verdict); + } + @Test public void testPatientExportRulesOnTypeLevelExportWithPermittedAndUnpermittedPatientFilters() { //Given @@ -341,7 +554,22 @@ public class RuleBulkExportImplTest { //When final AuthorizationInterceptor.Verdict verdict = myRule.applyRule(myOperation, myRequestDetails, null, null, null, myRuleApplier, myFlags, myPointcut); - //Then: There are unpermitted patients in the request so this is not permitted. - assertEquals(PolicyEnum.DENY, verdict.getDecision()); + //Then: There are unpermitted patients in the request so this is not permitted. abstain. + assertAbstain(verdict); + + } + + private static void assertAbstain(AuthorizationInterceptor.Verdict verdict) { + Assertions.assertEquals(null, verdict, "Expect abstain"); + } + + private static void assertAllow(AuthorizationInterceptor.Verdict verdict) { + Assertions.assertNotNull(verdict, "Expect ALLOW, got abstain"); + Assertions.assertEquals(PolicyEnum.ALLOW, verdict.getDecision(), "Expect ALLOW"); + } + + private static void assertDeny(AuthorizationInterceptor.Verdict verdict) { + Assertions.assertNotNull(verdict, "Expect DENY, got abstain"); + Assertions.assertEquals(PolicyEnum.DENY, verdict.getDecision(), "Expect DENY"); } } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java index 168e474ece7..50b7859a0a9 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java @@ -308,6 +308,14 @@ public abstract class BaseTask { } } + public void doNothing() { + setDoNothing(true); + } + + public void failureAllowed() { + setFailureAllowed(true); + } + public boolean isDoNothing() { return myDoNothing; } diff --git 
a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTask.java index 49615e73c1a..4b90ea91ac3 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTask.java @@ -22,11 +22,8 @@ package ca.uhn.fhir.jpa.migrate.taskdef; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.migrate.JdbcUtils; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.intellij.lang.annotations.Language; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.jdbc.core.ColumnMapRowMapper; -import org.springframework.jdbc.core.JdbcTemplate; import java.sql.SQLException; import java.util.Set; @@ -51,53 +48,12 @@ public class RenameTableTask extends BaseTableTask { setDescription("Rename table " + getOldTableName()); } - private void handleTableWithNewTableName() throws SQLException { - - if (!myDeleteTargetColumnFirstIfExist) { - throw new SQLException(Msg.code(2517) + "Can not rename " + getOldTableName() + " to " + getNewTableName() - + " because a table with name " + getNewTableName() + " already exists"); - } - - // a table with the new tableName already exists and we can delete it. we will only do so if it is empty. 
- Integer rowsWithData = getConnectionProperties().getTxTemplate().execute(t -> { - String sql = "SELECT * FROM " + getNewTableName(); - JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate(); - jdbcTemplate.setMaxRows(1); - return jdbcTemplate.query(sql, new ColumnMapRowMapper()).size(); - }); - - if (rowsWithData != null && rowsWithData > 0) { - throw new SQLException(Msg.code(2518) + "Can not rename " + getOldTableName() + " to " + getNewTableName() - + " because a table with name " + getNewTableName() + " already exists and is populated."); - } - - logInfo( - ourLog, - "Table {} already exists - Going to drop it before renaming table {} to {}", - getNewTableName(), - getOldTableName(), - getNewTableName()); - - @Language("SQL") - String sql = "DROP TABLE " + getNewTableName(); - executeSql(getNewTableName(), sql); - } - @Override public void doExecute() throws SQLException { Set tableNames = JdbcUtils.getTableNames(getConnectionProperties()); boolean hasTableWithNewTableName = tableNames.contains(getNewTableName()); - if (!tableNames.contains(getOldTableName())) { - throw new SQLException(Msg.code(2516) + "Can not rename " + getOldTableName() + " to " + getNewTableName() - + " because the original table does not exists"); - } - - if (hasTableWithNewTableName) { - handleTableWithNewTableName(); - } - String sql = buildRenameTableSqlStatement(); logInfo(ourLog, "Renaming table: {}", getOldTableName()); diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java index ffc800f9a9f..53741f65e5a 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java @@ -57,6 +57,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import 
java.util.Set; import java.util.stream.Collectors; @@ -185,6 +186,7 @@ public class Builder { private final String myRelease; private final BaseMigrationTasks.IAcceptsTasks mySink; private final String myTableName; + private BaseTask myLastAddedTask; public BuilderWithTableName(String theRelease, BaseMigrationTasks.IAcceptsTasks theSink, String theTableName) { myRelease = theRelease; @@ -274,6 +276,7 @@ public class Builder { @Override public void addTask(BaseTask theTask) { ((BaseTableTask) theTask).setTableName(myTableName); + myLastAddedTask = theTask; mySink.addTask(theTask); } @@ -311,6 +314,10 @@ public class Builder { return this; } + public Optional getLastAddedTask() { + return Optional.ofNullable(myLastAddedTask); + } + /** * @param theFkName the name of the foreign key * @param theParentTableName the name of the table that exports the foreign key @@ -323,31 +330,37 @@ public class Builder { addTask(task); } - public void renameTable(String theVersion, String theNewTableName) { + public BuilderCompleteTask renameTable(String theVersion, String theNewTableName) { RenameTableTask task = new RenameTableTask(myRelease, theVersion, getTableName(), theNewTableName); addTask(task); + return new BuilderCompleteTask(task); } - public void migratePostgresTextClobToBinaryClob(String theVersion, String theColumnName) { + public BuilderCompleteTask migratePostgresTextClobToBinaryClob(String theVersion, String theColumnName) { MigratePostgresTextClobToBinaryClobTask task = new MigratePostgresTextClobToBinaryClobTask(myRelease, theVersion); task.setTableName(getTableName()); task.setColumnName(theColumnName); addTask(task); + return new BuilderCompleteTask(task); } - public void migrateBlobToBinary(String theVersion, String theFromColumName, String theToColumName) { + public BuilderCompleteTask migrateBlobToBinary( + String theVersion, String theFromColumName, String theToColumName) { MigrateColumBlobTypeToBinaryTypeTask task = new MigrateColumBlobTypeToBinaryTypeTask( 
myRelease, theVersion, getTableName(), theFromColumName, theToColumName); addTask(task); + return new BuilderCompleteTask(task); } - public void migrateClobToText(String theVersion, String theFromColumName, String theToColumName) { + public BuilderCompleteTask migrateClobToText( + String theVersion, String theFromColumName, String theToColumName) { MigrateColumnClobTypeToTextTypeTask task = new MigrateColumnClobTypeToTextTypeTask( myRelease, theVersion, getTableName(), theFromColumName, theToColumName); addTask(task); + return new BuilderCompleteTask(task); } public class BuilderAddIndexWithName { diff --git a/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskTest.java b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskTest.java index a1c6aad40ba..14cf7896ec6 100644 --- a/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskTest.java +++ b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameTableTaskTest.java @@ -41,26 +41,4 @@ public class RenameTableTaskTest extends BaseTest { assertThat(tableNames, not(hasItem(oldTableName))); } - @ParameterizedTest(name = "{index}: {0}") - @MethodSource("data") - public void testRenameTableTask_whenTableDoesNotExists_willRaiseException(Supplier theTestDatabaseDetails) throws SQLException { - // given - before(theTestDatabaseDetails); - final String newTableName = "NEWTABLE"; - final String oldTableName = "SOMETABLE"; - - RenameTableTask task = new RenameTableTask("1", "1", oldTableName, newTableName); - getMigrator().addTask(task); - - // when - try { - getMigrator().migrate(); - fail(); - } catch (Exception e){ - // then - assertThat(e.getMessage(), containsString("2516")); - } - - } - } diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProviderTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProviderTest.java 
index ddc1fde19f5..9da0e377ce5 100644 --- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProviderTest.java +++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/imprt/BulkDataImportProviderTest.java @@ -436,6 +436,16 @@ public class BulkDataImportProviderTest { public boolean isResourcePartitionable(String theResourceType) { return false; } + + @Override + public RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId) { + return null; + } + + @Override + public RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId) { + return null; + } } private Date parseDate(String theString) { diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/ICollectDataServiceFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/ICollectDataServiceFactory.java index fcd883576bf..e94facf199f 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/ICollectDataServiceFactory.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/ICollectDataServiceFactory.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Clinical Reasoning + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.fhir.cr.r4; import ca.uhn.fhir.rest.api.server.RequestDetails; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IDataRequirementsServiceFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IDataRequirementsServiceFactory.java index f5de302cd97..483945ba1b5 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IDataRequirementsServiceFactory.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IDataRequirementsServiceFactory.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Clinical Reasoning + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.cr.r4; import ca.uhn.fhir.rest.api.server.RequestDetails; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CollectDataOperationProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CollectDataOperationProvider.java index bdaec6cc8ac..09313ca21ff 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CollectDataOperationProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CollectDataOperationProvider.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Clinical Reasoning + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. 
+ * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.cr.r4.measure; import ca.uhn.fhir.cr.r4.ICollectDataServiceFactory; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/DataRequirementsOperationProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/DataRequirementsOperationProvider.java index 9c801d91d39..da7ff850647 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/DataRequirementsOperationProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/DataRequirementsOperationProvider.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR - Clinical Reasoning + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.fhir.cr.r4.measure; import ca.uhn.fhir.cr.r4.IDataRequirementsServiceFactory; diff --git a/hapi-fhir-storage-test-utilities/src/main/java/ca/uhn/fhir/storage/test/DaoTestDataBuilder.java b/hapi-fhir-storage-test-utilities/src/main/java/ca/uhn/fhir/storage/test/DaoTestDataBuilder.java index 299475af681..483736324b2 100644 --- a/hapi-fhir-storage-test-utilities/src/main/java/ca/uhn/fhir/storage/test/DaoTestDataBuilder.java +++ b/hapi-fhir-storage-test-utilities/src/main/java/ca/uhn/fhir/storage/test/DaoTestDataBuilder.java @@ -102,7 +102,9 @@ public class DaoTestDataBuilder implements ITestDataBuilder.WithSupport, ITestDa public void cleanup() { ourLog.info("cleanup {}", myIds); - myIds.keySet().forEach(nextType->{ + myIds.keySet().stream() + .sorted() // Hack to ensure Patients are deleted before Practitioners. This may need to be refined. + .forEach(nextType->{ // todo do this in a bundle for perf. IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextType); myIds.get(nextType).forEach(dao::delete); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java index aafb69b8e2d..78ec78d7c79 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java @@ -360,6 +360,14 @@ public class JpaStorageSettings extends StorageSettings { */ private long myRestDeleteByUrlResourceIdThreshold = DEFAULT_REST_DELETE_BY_URL_RESOURCE_ID_THRESHOLD; + /** + * If enabled, this setting causes persisting data to legacy LOB columns as well as columns introduced + * to migrate away from LOB columns which effectively duplicates stored information. 
+ * + * @since 7.2.0 + */ + private boolean myWriteToLegacyLobColumns = false; + /** * Constructor */ @@ -2423,8 +2431,8 @@ public class JpaStorageSettings extends StorageSettings { * This setting controls the validation issue severity to report when a code validation * finds that the code is present in the given CodeSystem, but the display name being * validated doesn't match the expected value(s). Defaults to - * {@link ca.uhn.fhir.context.support.IValidationSupport.IssueSeverity#WARNING}. Set this - * value to {@link ca.uhn.fhir.context.support.IValidationSupport.IssueSeverity#INFORMATION} + * {@link IValidationSupport.IssueSeverity#WARNING}. Set this + * value to {@link IValidationSupport.IssueSeverity#INFORMATION} * if you don't want to see display name validation issues at all in resource validation * outcomes. * @@ -2439,8 +2447,8 @@ public class JpaStorageSettings extends StorageSettings { * This setting controls the validation issue severity to report when a code validation * finds that the code is present in the given CodeSystem, but the display name being * validated doesn't match the expected value(s). Defaults to - * {@link ca.uhn.fhir.context.support.IValidationSupport.IssueSeverity#WARNING}. Set this - * value to {@link ca.uhn.fhir.context.support.IValidationSupport.IssueSeverity#INFORMATION} + * {@link IValidationSupport.IssueSeverity#WARNING}. Set this + * value to {@link IValidationSupport.IssueSeverity#INFORMATION} * if you don't want to see display name validation issues at all in resource validation * outcomes. * @@ -2454,6 +2462,33 @@ public class JpaStorageSettings extends StorageSettings { myIssueSeverityForCodeDisplayMismatch = theIssueSeverityForCodeDisplayMismatch; } + /** + * This method returns whether data will be stored in LOB columns as well as the columns + * introduced to migrate away from LOB. Writing to LOB columns is set to false by + * default. Enabling the setting will effectively double the persisted information. 
+ * If enabled, a careful monitoring of LOB table (if applicable) is required to avoid + * exceeding the table maximum capacity. + * + * @since 7.2.0 + */ + public boolean isWriteToLegacyLobColumns() { + return myWriteToLegacyLobColumns; + } + + /** + * This setting controls whether data will be stored in LOB columns as well as the columns + * introduced to migrate away from LOB. Writing to LOB columns is set to false by + * default. Enabling the setting will effectively double the persisted information. + * When enabled, a careful monitoring of LOB table (if applicable) is required to avoid + * exceeding the table maximum capacity. + * + * @param theWriteToLegacyLobColumns + * @since 7.2.0 + */ + public void setWriteToLegacyLobColumns(boolean theWriteToLegacyLobColumns) { + myWriteToLegacyLobColumns = theWriteToLegacyLobColumns; + } + /** * This setting controls whether MdmLink and other non-resource DB history is enabled. *

    diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java index a98fb571d1a..be151786428 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java @@ -333,10 +333,6 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition return !myNonPartitionableResourceNames.contains(theResourceType); } - protected abstract RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId); - - protected abstract RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId); - private void validateSinglePartitionForCreate( RequestPartitionId theRequestPartitionId, @Nonnull String theResourceName, Pointcut thePointcut) { validateRequestPartitionNotNull(theRequestPartitionId, thePointcut);