diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index ccb06b52796..520335fca6b 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index 4af81cf369c..a0e76657589 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index ac906222b96..5fb7d9b771b 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntrySearchModeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntrySearchModeEnum.java index 8caf1e6c2e8..d81559092fd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntrySearchModeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntrySearchModeEnum.java @@ -29,6 +29,7 @@ import java.util.Map; public enum BundleEntrySearchModeEnum { MATCH("match", "http://hl7.org/fhir/search-entry-mode"), INCLUDE("include", "http://hl7.org/fhir/search-entry-mode"), + OUTCOME("outcome", "http://hl7.org/fhir/search-entry-mode"), ; /** @@ -79,7 +80,7 @@ public enum BundleEntrySearchModeEnum { /** * Returns the enumerated value associated with this code */ - public BundleEntrySearchModeEnum forCode(String theCode) { + public static BundleEntrySearchModeEnum forCode(String theCode) { BundleEntrySearchModeEnum retVal = CODE_TO_ENUM.get(theCode); return retVal; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java index 
2d0c437da39..5f5f59a7819 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java @@ -24,6 +24,7 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.base.composite.BaseCodingDt; import ca.uhn.fhir.model.base.composite.BaseIdentifierDt; import ca.uhn.fhir.model.primitive.UriDt; +import ca.uhn.fhir.rest.api.Constants; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; @@ -147,18 +148,22 @@ public class TokenParam extends BaseParam /*implements IQueryParameterType*/ { @Override void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theParameter) { setModifier(null); + setSystem(null); + if (theQualifier != null) { + if (Constants.PARAMQUALIFIER_MDM.equals(theQualifier)) { + setMdmExpand(true); + } + TokenParamModifier modifier = TokenParamModifier.forValue(theQualifier); setModifier(modifier); if (modifier == TokenParamModifier.TEXT) { - setSystem(null); setValue(ParameterUtil.unescape(theParameter)); return; } } - setSystem(null); if (theParameter == null) { setValue(null); } else { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java index 5328c4f8ba1..2f38aaa1597 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java @@ -30,11 +30,7 @@ public class SearchBundleEntryParts { public SearchBundleEntryParts(String theFullUrl, IBaseResource theResource, String theSearchMode) { myFullUrl = theFullUrl; myResource = theResource; - if (BundleEntrySearchModeEnum.INCLUDE.getCode().equalsIgnoreCase(theSearchMode)) { - mySearchMode = BundleEntrySearchModeEnum.INCLUDE; - } else { - 
mySearchMode = BundleEntrySearchModeEnum.MATCH; - } + mySearchMode = BundleEntrySearchModeEnum.forCode(theSearchMode); } public String getFullUrl() { diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/StringParamTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/StringParamTest.java index 3b6f7609537..62b83fdbaef 100644 --- a/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/StringParamTest.java +++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/StringParamTest.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.api.IQueryParameterType; +import ca.uhn.fhir.rest.api.Constants; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; @@ -132,6 +133,23 @@ public class StringParamTest { assertNicknameWarningLogged(false); } + @Test + public void testNameNickname() { + StringParam param = new StringParam(); + assertFalse(param.isNicknameExpand()); + param.setValueAsQueryToken(myContext, "name", Constants.PARAMQUALIFIER_NICKNAME, "kenny"); + assertTrue(param.isNicknameExpand()); + } + + @Test + public void testGivenNickname() { + StringParam param = new StringParam(); + assertFalse(param.isNicknameExpand()); + param.setValueAsQueryToken(myContext, "given", Constants.PARAMQUALIFIER_NICKNAME, "kenny"); + assertTrue(param.isNicknameExpand()); + } + + private void assertNicknameQualifierSearchParameterIsValid(StringParam theStringParam, String theExpectedValue){ assertTrue(theStringParam.isNicknameExpand()); assertFalse(theStringParam.isExact()); @@ -164,5 +182,5 @@ public class StringParamTest { assertTrue(warningLogs.isEmpty()); } } - + } diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index 9762c74a4e6..37459003bbd 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -4,7 +4,7 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT pom HAPI FHIR BOM @@ -12,7 +12,7 
@@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml index e7dd8f409e7..659f30ce0fc 100644 --- a/hapi-fhir-checkstyle/pom.xml +++ b/hapi-fhir-checkstyle/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index 61ffbc386b9..d5a85e8db5e 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index bc7648d950c..31efb5e1cdc 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index cdc1a92215b..bae9d20464c 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index 3416c8d1573..0dce45e5b92 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 076d8112f0d..0c8fd1a7c43 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index ef574e52bf0..69c806cf9fb 100644 --- 
a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index b72963d0195..5463e9b5d5a 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index f3e812ae484..b253184ccbf 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5088-fix-history-bundle-fullUrl.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5088-fix-history-bundle-fullUrl.yaml new file mode 100644 index 00000000000..b8acc62965f --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5088-fix-history-bundle-fullUrl.yaml @@ -0,0 +1,6 @@ +--- +type: fix +issue: 5088 +title: "Previously, the fullUrl for resources in _history bundles was not generated correctly when using a client +provided id. The same problem started to happen for the resources with server generated ids more recently +(after 6.9.10). 
This has now been fixed" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5711-fix-patch-operation-failing-for-complex-extension.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5711-fix-patch-operation-failing-for-complex-extension.yaml new file mode 100644 index 00000000000..e7a987b8f44 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5711-fix-patch-operation-failing-for-complex-extension.yaml @@ -0,0 +1,6 @@ +--- +type: fix +issue: 5771 +jira: SMILE-7837 +title: "Previously, a Patch operation would fail when adding a complex extension, i.e. an extension +comprised of another extension. This issue has been fixed." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5748-avoid-lob-usage-in-batch2-and-search.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5748-avoid-lob-usage-in-batch2-and-search.yaml new file mode 100644 index 00000000000..1eff1ab7cef --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5748-avoid-lob-usage-in-batch2-and-search.yaml @@ -0,0 +1,8 @@ +--- +type: perf +issue: 5748 +title: "In the JPA server, several database columns related to Batch2 jobs and searching + have been reworked so that they no will longer use LOB datatypes going forward. This + is a significant advantage on Postgresql databases as it removes a significant use + of the inefficient `pg_largeobject` table, and should yield performance boosts for + MSSQL as well." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5750-update-cr-operations.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5750-update-cr-operations.yaml new file mode 100644 index 00000000000..628f12df473 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5750-update-cr-operations.yaml @@ -0,0 +1,12 @@ +--- +type: add +issue: 5750 +title: "Update to the 3.2.0 release of the Clinical Reasoning Module. This includes the following changes: +" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5773-fix-subscriptions-with-null-content-cause-null-pointer-exception.yml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5773-fix-subscriptions-with-null-content-cause-null-pointer-exception.yml new file mode 100644 index 00000000000..af8c7de1b38 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5773-fix-subscriptions-with-null-content-cause-null-pointer-exception.yml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 5773 +title: "Subscriptions with null content caused NullPointerExceptions. This condition is now checked and handled." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5777-change-cds-on-fhir-prefetch.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5777-change-cds-on-fhir-prefetch.yaml new file mode 100644 index 00000000000..3cf58c3b6e7 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5777-change-cds-on-fhir-prefetch.yaml @@ -0,0 +1,4 @@ +--- +type: add +issue: 5777 +title: "Change the implementation of CDS on FHIR to use the Auto Prefetch functionality and to no longer pass the fhirServer from the request into the dataEndpoint parameter of $apply." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5780-searchbundleentryparts-correction.yml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5780-searchbundleentryparts-correction.yml new file mode 100644 index 00000000000..4701d18b0f0 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5780-searchbundleentryparts-correction.yml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 5780 +title: "SearchBundleEntryParts now correctly respects `OUTCOME` and `null` search modes in a bundle entry. In the public space, this means `BundleUtil#getSearchBundleEntryParts()` no longer incorrectly infers information about the entry mode " diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5784-composition-sort.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5784-composition-sort.yaml new file mode 100644 index 00000000000..1d3929581f2 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5784-composition-sort.yaml @@ -0,0 +1,4 @@ +--- +type: add +issue: 5784 +title: "Add support to _sort for chained `composition` Bundle SearchParameters" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5788-retain-x-request-id-header-casing.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5788-retain-x-request-id-header-casing.yaml new file mode 100644 index 00000000000..d9a5d1ab36a --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5788-retain-x-request-id-header-casing.yaml @@ -0,0 +1,6 @@ +--- +type: fix +issue: 5788 +title: "Previously, the casing of the X-Request-ID header key was not retained in the corresponding response. +This has been fixed." 
+ diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5800-enforce-maximum-bulk-export-file-size.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5800-enforce-maximum-bulk-export-file-size.yaml new file mode 100644 index 00000000000..46b2c510e34 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5800-enforce-maximum-bulk-export-file-size.yaml @@ -0,0 +1,6 @@ +--- +type: add +issue: 5800 +title: "A new setting in JpaStorageSettings enforces a maximum file size for Bulk Export + output files, as well as work chunks created during processing. This setting has + a default value of 100 MB." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5802-fix-mdm-expansion-not-returning-expected-resources.yml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5802-fix-mdm-expansion-not-returning-expected-resources.yml new file mode 100644 index 00000000000..8a0c6463646 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5802-fix-mdm-expansion-not-returning-expected-resources.yml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 5802 +title: "Previously, using the ':mdm' qualifier with the '_id' search parameter would not include expanded resources in the +search result. This issue has been fixed." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/cds_hooks/cds_hooks_intro.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/cds_hooks/cds_hooks_intro.md new file mode 100644 index 00000000000..fec9449175e --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/cds_hooks/cds_hooks_intro.md @@ -0,0 +1,143 @@ +# CDS Hooks + +CDS Hooks are services called by CDS Clients (typically Electronic Health Record Systems (EHRs) or other health +information systems). They implement a "hook"-based pattern for invoking decision support from within a clinician's +workflow.
+ +HAPI FHIR implements [Version 1.1 of the CDS Hooks Specification](https://cds-hooks.hl7.org/ballots/2020Sep/). + +The HAPI FHIR CDS Hooks Module simplifies the effort for creating CDS Hooks. All you need to do is create a method that +accepts a `CdsServiceRequestJson` parameter and returns a `CdsServiceResponseJson` value and annotate this method with +the `@CdsService` annotation. This annotation and the Json classes and all their subcomponents are available in the +open-source project called `hapi-fhir-server-cds-hooks`. Any FHIR resources in requests and responses are automatically serialized +into hapi-fhir FHIR resource instances for you, so they are easy to work with within your code. + +In addition to simplifying the effort to build CDS Hooks, the HAPI FHIR CDS Hooks module also provides the following: + +* All access is logged in the HAPI FHIR Audit Trail. +* Authorization is controlled by the HAPI FHIR security framework. +* Management and monitoring capabilities are provided by the HAPI FHIR platform. +* [CDS on FHIR](/docs/cds_hooks/#cds-on-fhir) implementation that auto-generates CDS Services from PlanDefinitions and executes via the $apply operation. + +# Auto Prefetch + +The HAPI FHIR CDS Hooks module provides a couple of powerful Auto-Prefetch features: + +1. If `allowAutoFhirClientPrefetch` is set to `true` in the `@CdsService` annotation on your CDS Service method, then + before calling your method, HAPI FHIR will compare the prefetch elements declared by your service method in + the `@CdsService` annotation to the prefetch elements included within the `CdsServiceRequestJson` REST request and if + it detects any are missing, then HAPI FHIR will use the FHIR endpoint authorization details included within + the `fhirAuthorization` element in the request to automatically add them to the prefetch before calling your method. +2. 
Even simpler, if your HAPI FHIR server has a FHIR Storage module, you can optionally add a dependency from your + CDS Hooks Module on your FHIR Storage module. If you do this, then when HAPI FHIR detects any required prefetch + elements missing in a request, it will automatically fetch the missing data from your storage module before calling + your CDS Hooks method. Note in this case, the same credentials used to call the CDS Hooks endpoint are used to + authorize access to the FHIR Storage module. + +## CDS Hooks Auto Prefetch Rules + +- If there are no missing prefetch elements, the CDS Hooks service method is called directly with the request. (Note + that per the CDS Hooks specification, a value of `null` is not considered to be missing. CDS Hooks clients set a + prefetch value to `null` to indicate that this prefetch data is known to not exist). +- Otherwise, if a `fhirServer` is included in the request + - If the `@CdsService` annotation on the service method has `allowAutoFhirClientPrefetch = true`, then HAPI FHIR will + perform a FHIR REST call to that `fhirServer` endpoint to fetch the missing data. + - otherwise, the CDS Hooks service method is expected to call the `fhirServer` endpoint itself to retrieve the + missing data. +- Otherwise, if the CDS Hooks Module declares a dependency on a FHIR Storage Module, then HAPI FHIR will fetch the + missing data from that FHIR Storage Module. +- Otherwise, the method will fail with HTTP 412 PRECONDITION FAILED (per the CDS Hooks specification). +- The Auto-Prefetch rules can be overridden for individual elements by setting a `source` for the `@CdsServicePrefetch`. + HAPI FHIR will attempt to use the `source` strategy for the query instead of following the order above. + +# Architecture + +The diagram below shows how CDS Hooks work. The box in grey contains *customer code*, which is code that you write. 
+ +CDS Hooks Architecture + +A CDS Hooks implementation is packaged as a Java JAR file that contains several key components: + +* **CDS Service** classes, which implement CDS Hooks *service* and *feedback* methods. +* A **Spring Context Config** class, which is a Spring Framework class used to instantiate and configure the CDS Hooks + classes. + +# CDS Hooks Classes + +A CDS Hooks class contains annotated *service* and *feedback* methods. One CDS Hooks class can contain any number of +these methods. A CDS Hooks *service* method is annotated with the `@CdsService` annotation and a CDS Hooks *feedback* +method is annotated with the `@CdsServiceFeedback` annotation. The "value" of these annotations corresponds to the id of +the CDS Hooks service. For example: + +A method annotated with `@CdsService(value="example-service")` is accessed at a path +like `https://example.com:8888/cds-services/example-service` + +A method annotated with `@CdsServiceFeedback(value="my-service")` is accessed at a path +like `https://example.com:8888/cds-services/my-service/feedback`. + +A very basic example is shown below: + +```java +{{snippet:file:hapi-fhir-server-cds-hooks/src/test/java/ca.uhn.hapi.fhir.cdshooks/controller/ExampleCdsService.java}} +``` + +Both of these example methods accept a single json instance parameter (`CdsServiceRequestJson` +and `CdsServiceFeedbackJson` respectively). Alternatively, these methods can accept a single String parameter in which +case the CDS Hooks module will string-encode the instance before calling the method. + +# The Spring Context Config Class + +This mandatory class is a [Spring Framework](https://springframework.org) Annotation-based Application Context Config +class. 
It is characterized by having the `@Configuration` annotation on the class itself, as well as having one or more +non-static factory methods annotated with the `@Bean` method, which create instances of your providers (as well as +creating any other utility classes you might need, such as database pools, HTTP clients, etc.). + +This class must instantiate a bean named `cdsServices`: + +* The `cdsServices` bean method should return a `List` of classes that contain `@CdsService` + and/or `@CdsServiceFeedback` annotated methods. + +The following example shows a Spring Context Config class that registers the CDS Hooks example above. + +```java +@Configuration +public class TestServerAppCtx { + + /** + * This bean is a list of CDS Hooks classes, each one + * of which implements one or more CDS-Hook Services. + */ + @Bean(name = "cdsServices") + public List cdsServices(){ + List retVal = new ArrayList<>(); + retVal.add(new ExampleCdsService()); +// add other CDS Hooks classes... + return retVal; + } +} +``` + +# Calling CDS Hooks + +Per [Version 1.1 of the CDS Hooks Specification](https://cds-hooks.hl7.org/ballots/2020Sep/), a list of all registered +services is available at a path like `https://example.com:8888/cds-services`. As a convenience, swagger REST +documentation is provided at the root of the endpoint: `https://example.com:8888/`. + +# Example Project + +A sample CDS Hooks project is available at the following links: + +* [cdr-endpoint-cds-hooks-demoproject-1.0.zip](/docs/downloads/cdr-endpoint-cds-hooks-demoproject-1.0.zip) +* [cdr-endpoint-cds-hooks-demoproject-1.0.tar.gz](/docs/downloads/cdr-endpoint-cds-hooks-demoproject-1.0.tar.gz) + +# CDS on FHIR + +To create CDS Services from PlanDefinitions the dependencies for a FHIR Storage Module, FHIR Endpoint and CQL module must be set. This will create a listener on the storage module so that any changes to PlanDefinition resources will update the CDS Service cache. 
+ +Any PlanDefinition resource with an action that has a trigger of type [named-event](http://hl7.org/fhir/R4/codesystem-trigger-type.html#trigger-type-named-event) will have a CDS Service created using the PlanDefinition.id as the service id and the name of the trigger as the hook that the service is created for per the [CDS on FHIR Specification](https://hl7.org/fhir/clinicalreasoning-cds-on-fhir.html#surfacing-clinical-decision-support). + +CDS Services created this way will show up as registered services and can be called just as other services are called. The CDS Service request will be converted into parameters for the [$apply operation](/docs/clinical_reasoning/plan_definitions.html#apply), the results of which are then converted into a CDS Response per the [CDS on FHIR Specification](https://hl7.org/fhir/clinicalreasoning-cds-on-fhir.html#consuming-decision-support). + +These CDS Services will take advantage of the [Auto Prefetch](/docs/cds_hooks/#auto-prefetch) feature. Prefetch data is included as a Bundle in the `data` parameter of the $apply call. + +The $apply operation is running against the FHIR Storage Module, so it will also have access to any data stored there. Any CQL evaluation during the $apply operation that results in a retrieve will always pull from the Bundle and the FHIR Storage Module. This is done regardless of what data is passed into the prefetch of the service request. 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/activity_definitions.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/activity_definitions.md new file mode 100644 index 00000000000..d8dc3f0d526 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/activity_definitions.md @@ -0,0 +1,48 @@ +# ActivityDefinition + +## Introduction + +The FHIR Clinical Reasoning Module defines the [ActivityDefinition resource](https://www.hl7.org/fhir/activitydefinition.html) and several [associated operations](https://www.hl7.org/fhir/activitydefinition-operations.html). An ActivityDefinition is a shareable, consumable description of some activity to be performed. It may be used to specify actions to be taken as part of a workflow, order set, or protocol, or it may be used independently as part of a catalog of activities such as orderables. + +In general, an activity definition is simply a conceptual description of some specific action that should be taken. An instance of an ActivityDefinition does not indicate that any action has been performed (as an event resource does), nor does it indicate the actual intent to carry out any particular action (as a request resource does). Instead, an activity definition provides a reusable template that can be used to construct specific request resources such as ServiceRequest and MedicationRequest. + +Note that this is conceptually similar to the Task resource as well, with the distinction being that ActivityDefinition represents the description of a task in the abstract, while the Task resource is used to track a specific instance of a task as it moves through the steps of a workflow. + +An ActivityDefinition resource provides a description, or template, of an action to perform. 
These actions can be purely text-based descriptions of the action to be performed, only interpretable by a human user, or they can be structured definitions with enough information to construct a resource to represent the request or activity directly. This process of converting the ActivityDefinition into a specific resource in a particular context is performed with the [$apply](/docs/clinical_reasoning/activity_definitions.html#apply) operation. + +## Operations + +HAPI implements the following operations for ActivityDefinitions + +* [$apply](/docs/clinical_reasoning/activity_definitions.html#apply) + +## Apply + +The `ActivityDefinition/$apply` [operation](https://www.hl7.org/fhir/activitydefinition-operation-apply.html) creates a [Request Resource](https://www.hl7.org/fhir/workflow.html#request) for a given context. This implementation follows the [FHIR Specification](https://www.hl7.org/fhir/activitydefinition.html#12.22.4.3) and supports the [FHIR Clinical Guidelines IG](http://hl7.org/fhir/uv/cpg/index.html). + +### Parameters + +The following parameters are supported for the `ActivityDefinition/$apply` operation: + +| Parameter | Type | Description | +|---------------------|---------------------------|-------------| +| activityDefinition | ActivityDefinition | The activity definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter is required, or a url (and optionally version) must be supplied. | +| canonical | canonical(ActivityDefinition) | The canonical url of the activity definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter (and optionally the version), or the activityDefinition parameter must be supplied. | +| url | uri | Canonical URL of the ActivityDefinition when invoked at the resource type level. 
This is exclusive with the activityDefinition and canonical parameters. | +| version | string | Version of the ActivityDefinition when invoked at the resource type level. This is exclusive with the activityDefinition and canonical parameters. | +| subject | string(reference) | The subject(s) that is/are the target of the activity definition to be applied. | +| encounter | string(reference) | The encounter in context, if any. | +| practitioner | string(reference) | The practitioner applying the activity definition. | +| organization | string(reference) | The organization applying the activity definition. | +| userType | CodeableConcept | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.) | +| userLanguage | CodeableConcept | Preferred language of the person using the system | +| userTaskContext | CodeableConcept | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources. | +| setting | CodeableConcept | The current setting of the request (inpatient, outpatient, etc.). | +| settingContext | CodeableConcept | Additional detail about the setting of the request, if any | +| parameters | Parameters | Any input parameters defined in libraries referenced by the ActivityDefinition. | +| useServerData | boolean | Whether to use data from the server performing the evaluation. If this parameter is true (the default), then the operation will use data first from any bundles provided as parameters (through the data and prefetch parameters), second data from the server performing the operation, and third, data from the dataEndpoint parameter (if provided). If this parameter is false, the operation will use data first from the bundles provided in the data or prefetch parameters, and second from the dataEndpoint parameter (if provided). 
| +| data | Bundle | Data to be made available to the ActivityDefinition evaluation. | +| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the ActivityDefinition. | +| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the ActivityDefinition. | +| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the ActivityDefinition. | + diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/overview.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/overview.md index 37433211661..a15260832a8 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/overview.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/overview.md @@ -18,6 +18,7 @@ There are additional IGs outside the FHIR CR module that define further requirem * [Structured Data Capture IG](https://build.fhir.org/ig/HL7/sdc/) * [Clinical Guidelines IG](https://hl7.org/fhir/uv/cpg/) * [Quality Measures IG](http://hl7.org/fhir/us/cqfmeasures/) +* [Canonical Resource Management Infrastructure IG](https://build.fhir.org/ig/HL7/crmi-ig/index.html) ## HAPI FHIR diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/plan_definitions.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/plan_definitions.md index 3e0e6a0040a..19df9962f8b 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/plan_definitions.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/plan_definitions.md @@ -13,6 +13,75 @@ The process of applying a PlanDefinition to a particular context typically produ Each ActivityDefinition is used to construct a specific resource, based on the definition of the activity 
and combined with contextual information for the particular patient that the plan definition is being applied to. +## Operations + +HAPI implements the following operations for PlanDefinitions: + +* [$apply](/docs/clinical_reasoning/plan_definitions.html#apply) +* [$package](/docs/clinical_reasoning/plan_definitions.html#package) + +## Apply + +The `PlanDefinition/$apply` [operation](https://www.hl7.org/fhir/plandefinition-operation-apply.html) applies a PlanDefinition to a given context. This implementation follows the [FHIR Specification](https://www.hl7.org/fhir/plandefinition.html#12.23.4.3) and supports the [FHIR Clinical Guidelines IG](http://hl7.org/fhir/uv/cpg/index.html). In addition, an R5 version of apply is made available for R4 instances. This will cause $apply to return a Bundle of resources instead of a CarePlan. This can be invoked with `$r5.apply`. + +Some example PlanDefinition workflows are available in the [opioid-cds-r4](https://github.com/cqframework/opioid-cds-r4) IG. Full Bundles with all the required supporting resources are available [here](https://github.com/cqframework/opioid-cds-r4/tree/1e543f781138f3d85404b7f65a92ff713519ef2c/bundles). You can download a Bundle and load it on your server as a transaction: + +```bash +POST http://your-server-base/fhir opioidcds-10-patient-view-bundle.json +``` + +These Bundles do not include example Patient clinical data. Applying a PlanDefinition can be invoked with: + +```bash +GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$apply?subject=Patient/patientId&encounter=Encounter/encounterId&practitioner=Practitioner/practitionerId +``` + +### Parameters + +The following parameters are supported for the `PlanDefinition/$apply` and `PlanDefinition/$r5.apply` operation: + +| Parameter | Type | Description | +|---------------------|---------------------------|-------------| +| planDefinition | PlanDefinition | The plan definition to be applied. 
If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter is required, or a url (and optionally version) must be supplied. | +| canonical | canonical(PlanDefinition) | The canonical url of the plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter (and optionally the version), or the planDefinition parameter must be supplied. | +| url | uri | Canonical URL of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. | +| version | string | Version of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. | +| subject | string(reference) | The subject(s) that is/are the target of the plan definition to be applied. | +| encounter | string(reference) | The encounter in context, if any. | +| practitioner | string(reference) | The practitioner applying the plan definition. | +| organization | string(reference) | The organization applying the plan definition. | +| userType | CodeableConcept | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.) | +| userLanguage | CodeableConcept | Preferred language of the person using the system | +| userTaskContext | CodeableConcept | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources. | +| setting | CodeableConcept | The current setting of the request (inpatient, outpatient, etc.). 
| +| settingContext | CodeableConcept | Additional detail about the setting of the request, if any | +| parameters | Parameters | Any input parameters defined in libraries referenced by the PlanDefinition. | +| useServerData | boolean | Whether to use data from the server performing the evaluation. If this parameter is true (the default), then the operation will use data first from any bundles provided as parameters (through the data and prefetch parameters), second data from the server performing the operation, and third, data from the dataEndpoint parameter (if provided). If this parameter is false, the operation will use data first from the bundles provided in the data or prefetch parameters, and second from the dataEndpoint parameter (if provided). | +| data | Bundle | Data to be made available to the PlanDefinition evaluation. | +| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the PlanDefinition. | +| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the PlanDefinition. | +| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the PlanDefinition. | + + +## Package + +The `PlanDefinition/$package` [operation](https://build.fhir.org/ig/HL7/crmi-ig/OperationDefinition-crmi-package.html) for PlanDefinition will generate a Bundle of resources that includes the PlanDefinition as well as any related resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html). 
+ +### Parameters + +The following parameters are supported for the `PlanDefinition/$package` operation: + +| Parameter | Type | Description | +|-----------|-----------|-------------| +| id | string | The logical id of an existing Resource to package on the server. | +| canonical | canonical | A canonical url (optionally version specific) of a Resource to package on the server. | +| url | uri | A canonical or artifact reference to a Resource to package on the server. This is exclusive with the canonical parameter. | +| version | string | The version of the Resource. This is exclusive with the canonical parameter. | +| usePut | boolean | Determines the type of method returned in the Bundle Entries: POST if False (the default), PUT if True. | + + +## Example PlanDefinition + ```json { "resourceType": "PlanDefinition", @@ -223,48 +292,3 @@ Each ActivityDefinition is used to construct a specific resource, based on the d ] } ``` - -## Operations - -HAPI implements the [$apply](http://hl7.org/fhir/uv/cpg/OperationDefinition-cpg-plandefinition-apply.html) operation. Support for additional operations is planned. - -## Apply - -The `$apply` operation applies a PlanDefinition to a given context. This implementation follows the [FHIR Specification](https://www.hl7.org/fhir/plandefinition.html#12.23.4.3) and supports the [FHIR Clinical Guidelines IG](http://hl7.org/fhir/uv/cpg/index.html). In addition, an R5 version of apply is made available for R4 instances. This will cause $apply to return a Bundle of resources instead of a CarePlan. This can be invoked with `$r5.apply`. - -### Example PlanDefinition - -Some example PlanDefinition workflows are available in the [opioid-cds-r4](https://github.com/cqframework/opioid-cds-r4) IG. Full Bundles with all the required supporting resources are available [here](https://github.com/cqframework/opioid-cds-r4/tree/1e543f781138f3d85404b7f65a92ff713519ef2c/bundles). 
You can download a Bundle and load it on your server as a transaction: - -```bash -POST http://your-server-base/fhir opioidcds-10-patient-view-bundle.json -``` - -These Bundles do not include example Patient clinical data. Applying a PlanDefinition can be invoked with: - -```bash -GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$apply?subject=Patient/patientId&encounter=Encounter/encounterId&practitioner=Practitioner/practitionerId -``` - -### Additional Parameters - -The following additional parameters are supported for the `$apply` and `$r5.apply` operation: - -| Parameter | Type | Description | -|-----------|------------|-------------| -| organization | String | The organization in context | -| userType | String | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.) | -| userLanguage | String | Preferred language of the person using the system | -| userTaskContext | String | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources | -| setting | String | The current setting of the request (inpatient, outpatient, etc.) | -| settingContext | String | Additional detail about the setting of the request, if any | -| parameters | Parameters | Any input parameters defined in libraries referenced by the PlanDefinition. | -| data | Bundle | Data to be made available to the PlanDefinition evaluation. | -| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the PlanDefinition. | -| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the PlanDefinition. | -| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. 
valuesets, codesystems, and membership testing) referenced by the PlanDefinition. | - - -## Package - -The `package` operation for [PlanDefinition](https://www.hl7.org/fhir/plandefinition.html) will generate a Bundle of resources that includes the PlanDefinition as well as any related resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html). diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/questionnaires.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/questionnaires.md index 64f4c9cbdc2..442322cb12e 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/questionnaires.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/questionnaires.md @@ -10,17 +10,96 @@ In addition to its use as a means for capturing data, Questionnaires can also be ## Operations -HAPI implements the following operations from the [Structured Data Capture IG](https://hl7.org/fhir/uv/sdc/index.html) -* [$populate](https://hl7.org/fhir/uv/sdc/OperationDefinition-Questionnaire-populate.html) -* [$extract](http://hl7.org/fhir/uv/sdc/OperationDefinition-QuestionnaireResponse-extract.html) +HAPI implements the following operations for Questionnaires and QuestionnaireResponses: -Support for additional operations is planned. 
+* [$questionnaire](/docs/clinical_reasoning/questionnaires.html#questionnaire) +* [$populate](/docs/clinical_reasoning/questionnaires.html#populate) +* [$extract](/docs/clinical_reasoning/questionnaires.html#extract) +* [$package](/docs/clinical_reasoning/questionnaires.html#package) + + +## Questionnaire + +The `StructureDefinition/$questionnaire` operation generates a [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) from a given [StructureDefinition](https://www.hl7.org/fhir/structuredefinition.html). A question will be created for each core element or extension element found in the StructureDefinition. + +### Parameters + +The following parameters are supported for the `StructureDefinition/$questionnaire` operation: + +| Parameter | Type | Description | +|-----------|------|-------------| +| profile | StructureDefinition | The StructureDefinition to base the Questionnaire on. Used when the operation is invoked at the 'type' level. | +| canonical | canonical | The canonical identifier for the StructureDefinition (optionally version-specific). | +| url | uri | Canonical URL of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. | +| version | string | Version of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. | +| supportedOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "mustSupport='true'" in the StructureDefinition. | +| requiredOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "min>0" in the StructureDefinition. | +| subject | string | The subject(s) that is/are the target of the Questionnaire. | +| parameters | Parameters | Any input parameters defined in libraries referenced by the StructureDefinition. 
| +| useServerData | boolean | Whether to use data from the server performing the evaluation. | +| data | Bundle | Data to be made available during CQL evaluation. | +| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the StructureDefinition. | +| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the StructureDefinition. | +| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the StructureDefinition. | ## Populate -The `populate` operation generates a [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) based on a specific [Questionnaire](https://www.hl7.org/fhir/questionnaire.html), filling in answers to questions where possible based on information provided as part of the operation or already known by the server about the subject of the Questionnaire. +The `Questionnaire/$populate` [operation](https://hl7.org/fhir/uv/sdc/OperationDefinition-Questionnaire-populate.html) generates a [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) based on a specific [Questionnaire](https://www.hl7.org/fhir/questionnaire.html), filling in answers to questions where possible based on information provided as part of the operation or already known by the server about the subject of the Questionnaire. -### Example Questionnaire +### Parameters + +The following parameters are supported for the `Questionnaire/$populate` operation: + +| Parameter | Type | Description | +|-----------|------|-------------| +| questionnaire | Questionnaire | The Questionnaire to populate. Used when the operation is invoked at the 'type' level. | +| canonical | canonical | The canonical identifier for the Questionnaire (optionally version-specific). | +| url | uri | Canonical URL of the Questionnaire when invoked at the resource type level. 
This is exclusive with the questionnaire and canonical parameters. | +| version | string | Version of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. | +| subject | string | The subject(s) that is/are the target of the Questionnaire. | +| parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. | +| useServerData | boolean | Whether to use data from the server performing the evaluation. | +| data | Bundle | Data to be made available during CQL evaluation. | +| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the Questionnaire. | +| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the Questionnaire. | +| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the Questionnaire. | + + +## Extract + +The `QuestionnaireResponse/$extract` [operation](http://hl7.org/fhir/uv/sdc/OperationDefinition-QuestionnaireResponse-extract.html) takes a completed [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) and converts it to a Bundle of resources by using metadata embedded in the [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) the QuestionnaireResponse is based on. The extracted resources might include Observations, MedicationStatements and other standard FHIR resources which can then be shared and manipulated. When invoking the $extract operation, care should be taken that the submitted QuestionnaireResponse is itself valid. If not, the extract operation could fail (with appropriate OperationOutcomes) or, more problematic, might succeed but provide incorrect output. 
+ +This implementation allows for both [Observation based](https://hl7.org/fhir/uv/sdc/extraction.html#observation-based-extraction) and [Definition based](https://hl7.org/fhir/uv/sdc/extraction.html#definition-based-extraction) extraction. + +### Parameters + +The following parameters are supported for the `QuestionnaireResponse/$extract` operation: + +| Parameter | Type | Description | +|-----------|------|-------------| +| questionnaire-response | QuestionnaireResponse | The QuestionnaireResponse to extract data from. Used when the operation is invoked at the 'type' level. | +| parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. | +| data | Bundle | Data to be made available during CQL evaluation. | + + +## Package + +The `Questionnaire/$package` [operation](https://build.fhir.org/ig/HL7/crmi-ig/OperationDefinition-crmi-package.html) for [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) will generate a Bundle of resources that includes the Questionnaire as well as any related Library or ValueSet resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html). + +### Parameters + +The following parameters are supported for the `Questionnaire/$package` operation: + +| Parameter | Type | Description | +|-----------|-----------|-------------| +| id | string | The logical id of an existing Resource to package on the server. | +| canonical | canonical | A canonical url (optionally version specific) of a Resource to package on the server. | +| url | uri | A canonical or artifact reference to a Resource to package on the server. This is exclusive with the canonical parameter. | +| version | string | The version of the Resource. This is exclusive with the canonical parameter. 
| +| usePut | boolean | Determines the type of method returned in the Bundle Entries: POST if False (the default), PUT if True. | + + +## Example Questionnaire ```json { @@ -219,7 +298,7 @@ The `populate` operation generates a [QuestionnaireResponse](https://www.hl7.org } ``` -### Example QuestionnaireResponse +## Example QuestionnaireResponse ```json { @@ -486,14 +565,3 @@ The `populate` operation generates a [QuestionnaireResponse](https://www.hl7.org ] } ``` - -## Extract - -The `extract` operation takes a completed [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) and converts it to a Bundle of resources by using metadata embedded in the [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) the QuestionnaireResponse is based on. The extracted resources might include Observations, MedicationStatements and other standard FHIR resources which can then be shared and manipulated. When invoking the $extract operation, care should be taken that the submitted QuestionnaireResponse is itself valid. If not, the extract operation could fail (with appropriate OperationOutcomes) or, more problematic, might succeed but provide incorrect output. - -This implementation allows for both [Observation based](https://hl7.org/fhir/uv/sdc/extraction.html#observation-based-extraction) and [Definition based](https://hl7.org/fhir/uv/sdc/extraction.html#definition-based-extraction) extraction. - - -## Package - -The `package` operation for [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) will generate a Bundle of resources that includes the Questionnaire as well as any related Library or ValueSet resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html). 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/images/cds_hooks.svg b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/images/cds_hooks.svg new file mode 100644 index 00000000000..dcbd0f9868b --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/images/cds_hooks.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index f36ac240c1a..a5d73a949a7 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index 62109e4badc..c992c31b9bb 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml index ccbad3dc773..dc0e37591be 100644 --- a/hapi-fhir-jpa/pom.xml +++ b/hapi-fhir-jpa/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/BaseSqlLoggerFilterImpl.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/BaseSqlLoggerFilterImpl.java index 9aa59627fc1..98d7d6b3e53 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/BaseSqlLoggerFilterImpl.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/BaseSqlLoggerFilterImpl.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Model + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.jpa.logging; import com.google.common.annotations.VisibleForTesting; diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/FilteringSqlLoggerImplContributor.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/FilteringSqlLoggerImplContributor.java index 5f3ec6b5e67..1a5af05263f 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/FilteringSqlLoggerImplContributor.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/FilteringSqlLoggerImplContributor.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Model + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.fhir.jpa.logging; import org.hibernate.boot.registry.StandardServiceRegistryBuilder; diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/ISqlLoggerFilter.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/ISqlLoggerFilter.java index 13bbe53f05a..bbef364ef3d 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/ISqlLoggerFilter.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/ISqlLoggerFilter.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Model + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.jpa.logging; /** diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerFilteringUtil.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerFilteringUtil.java index ab07a97522f..0cbd8e34619 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerFilteringUtil.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerFilteringUtil.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Model + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.jpa.logging; import ca.uhn.fhir.i18n.Msg; diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerFragmentFilter.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerFragmentFilter.java index f37670e8b00..71a38955e38 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerFragmentFilter.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerFragmentFilter.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Model + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.fhir.jpa.logging; /** diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerStackTraceFilter.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerStackTraceFilter.java index 20c62da5e07..d2734ee03c0 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerStackTraceFilter.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerStackTraceFilter.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Model + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.jpa.logging; import ca.uhn.fhir.i18n.Msg; diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerStartsWithFilter.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerStartsWithFilter.java index 44f4f9921aa..17fc5faec91 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerStartsWithFilter.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlLoggerStartsWithFilter.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Model + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ package ca.uhn.fhir.jpa.logging; /** diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlStatementFilteringLogger.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlStatementFilteringLogger.java index 4c5c41587da..dae7888bb35 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlStatementFilteringLogger.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/logging/SqlStatementFilteringLogger.java @@ -1,3 +1,22 @@ +/*- + * #%L + * HAPI FHIR JPA Model + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ package ca.uhn.fhir.jpa.logging; import org.hibernate.engine.jdbc.spi.SqlStatementLogger; diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 8e21a98cf58..9e57f2dedae 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirHibernateJpaDialect.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirHibernateJpaDialect.java index 6e820303187..b76f2113af0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirHibernateJpaDialect.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirHibernateJpaDialect.java @@ -74,7 +74,7 @@ public class HapiFhirHibernateJpaDialect extends HibernateJpaDialect { } if (HapiSystemProperties.isUnitTestModeEnabled()) { - ourLog.error("Hibernate exception", theException); + ourLog.error("Unit test mode: Hibernate exception", theException); } if (theException instanceof ConstraintViolationException) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java index 21f40b20ee5..a44d3bbf0a7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java @@ -167,6 +167,14 @@ public class HistoryBuilder { Optional forcedId = pidToForcedId.get(JpaPid.fromId(nextResourceId)); if (forcedId.isPresent()) { resourceId = forcedId.get(); + // IdHelperService returns a forcedId with the '/' prefix + // but the transientForcedId is expected to be just the idPart (without the / prefix). + // For that reason, strip the prefix before setting the transientForcedId below. 
+ // If not stripped this messes up the id of the resource as the resourceType would be repeated + // twice like Patient/Patient/1234 in the resource constructed + if (resourceId.startsWith(myResourceType + "/")) { + resourceId = resourceId.substring(myResourceType.length() + 1); + } } else { resourceId = nextResourceId.toString(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java index 4a078473395..023fd93af64 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java @@ -55,14 +55,15 @@ public interface IBatch2JobInstanceRepository int updateWorkChunksPurgedTrue(@Param("id") String theInstanceId); @Query( - "SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myParamsJson = :params AND b.myStatus IN( :stats )") + "SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND (b.myParamsJson = :params OR b.myParamsJsonVc = :params) AND b.myStatus IN( :stats )") List findInstancesByJobIdParamsAndStatus( @Param("defId") String theDefinitionId, @Param("params") String theParams, @Param("stats") Set theStatus, Pageable thePageable); - @Query("SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myParamsJson = :params") + @Query( + "SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND (b.myParamsJson = :params OR b.myParamsJsonVc = :params)") List findInstancesByJobIdAndParams( @Param("defId") String theDefinitionId, @Param("params") String theParams, Pageable thePageable); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java index 97be8b31f70..2273fb3aa05 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java @@ -65,7 +65,7 @@ public interface IBatch2WorkChunkRepository @Modifying @Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, " - + "e.myRecordsProcessed = :rp, e.myErrorCount = e.myErrorCount + :errorRetries, e.mySerializedData = null, " + + "e.myRecordsProcessed = :rp, e.myErrorCount = e.myErrorCount + :errorRetries, e.mySerializedData = null, e.mySerializedDataVc = null, " + "e.myWarningMessage = :warningMessage WHERE e.myId = :id") void updateChunkStatusAndClearDataForEndSuccess( @Param("id") String theChunkId, @@ -77,7 +77,7 @@ public interface IBatch2WorkChunkRepository @Modifying @Query( - "UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.mySerializedData = null, e.myErrorMessage = :em WHERE e.myId IN(:ids)") + "UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.mySerializedData = null, e.mySerializedDataVc = null, e.myErrorMessage = :em WHERE e.myId IN(:ids)") void updateAllChunksForInstanceStatusClearDataAndSetError( @Param("ids") List theChunkIds, @Param("et") Date theEndTime, diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java index 65ddadcc796..bcf0a6cc0d9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java @@ -36,6 +36,7 @@ import jakarta.persistence.TemporalType; import jakarta.persistence.Version; import 
org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import org.hibernate.Length; import java.io.Serializable; import java.util.Date; @@ -95,13 +96,17 @@ public class Batch2JobInstanceEntity implements Serializable { @Column(name = "FAST_TRACKING", nullable = true) private Boolean myFastTracking; + // TODO: VC column added in 7.2.0 - Remove non-VC column later @Column(name = "PARAMS_JSON", length = PARAMS_JSON_MAX_LENGTH, nullable = true) private String myParamsJson; - @Lob + @Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later @Column(name = "PARAMS_JSON_LOB", nullable = true) private String myParamsJsonLob; + @Column(name = "PARAMS_JSON_VC", nullable = true, length = Length.LONG32) + private String myParamsJsonVc; + @Column(name = "CMB_RECS_PROCESSED", nullable = true) private Integer myCombinedRecordsProcessed; @@ -142,11 +147,14 @@ public class Batch2JobInstanceEntity implements Serializable { * Any output from the job can be held in this column * Even serialized json */ - @Lob + @Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later @Basic(fetch = FetchType.LAZY) @Column(name = "REPORT", nullable = true, length = Integer.MAX_VALUE - 1) private String myReport; + @Column(name = "REPORT_VC", nullable = true, length = Length.LONG32) + private String myReportVc; + public String getCurrentGatedStepId() { return myCurrentGatedStepId; } @@ -260,6 +268,9 @@ public class Batch2JobInstanceEntity implements Serializable { } public String getParams() { + if (myParamsJsonVc != null) { + return myParamsJsonVc; + } if (myParamsJsonLob != null) { return myParamsJsonLob; } @@ -267,13 +278,9 @@ public class Batch2JobInstanceEntity implements Serializable { } public void setParams(String theParams) { + myParamsJsonVc = theParams; myParamsJsonLob = null; myParamsJson = null; - if (theParams != null && theParams.length() > PARAMS_JSON_MAX_LENGTH) { - myParamsJsonLob = theParams; - } else { - 
myParamsJson = theParams; - } } public boolean getWorkChunksPurged() { @@ -309,11 +316,12 @@ public class Batch2JobInstanceEntity implements Serializable { } public String getReport() { - return myReport; + return myReportVc != null ? myReportVc : myReport; } public void setReport(String theReport) { - myReport = theReport; + myReportVc = theReport; + myReport = null; } public String getWarningMessages() { @@ -362,7 +370,7 @@ public class Batch2JobInstanceEntity implements Serializable { .append("progress", myProgress) .append("errorMessage", myErrorMessage) .append("estimatedTimeRemaining", myEstimatedTimeRemaining) - .append("report", myReport) + .append("report", getReport()) .append("warningMessages", myWarningMessages) .append("initiatingUsername", myTriggeringUsername) .append("initiatingclientId", myTriggeringClientId) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java index c7bcb688cf4..126f839f843 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java @@ -39,6 +39,7 @@ import jakarta.persistence.TemporalType; import jakarta.persistence.Version; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import org.hibernate.Length; import java.io.Serializable; import java.util.Date; @@ -97,11 +98,14 @@ public class Batch2WorkChunkEntity implements Serializable { @Column(name = "TGT_STEP_ID", length = ID_MAX_LENGTH, nullable = false) private String myTargetStepId; - @Lob + @Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later @Basic(fetch = FetchType.LAZY) @Column(name = "CHUNK_DATA", nullable = true, length = Integer.MAX_VALUE - 1) private String mySerializedData; + @Column(name = "CHUNK_DATA_VC", nullable = 
true, length = Length.LONG32) + private String mySerializedDataVc; + @Column(name = "STAT", length = STATUS_MAX_LENGTH, nullable = false) @Enumerated(EnumType.STRING) private WorkChunkStatusEnum myStatus; @@ -290,11 +294,12 @@ public class Batch2WorkChunkEntity implements Serializable { } public String getSerializedData() { - return mySerializedData; + return mySerializedDataVc != null ? mySerializedDataVc : mySerializedData; } public void setSerializedData(String theSerializedData) { - mySerializedData = theSerializedData; + mySerializedData = null; + mySerializedDataVc = theSerializedData; } public WorkChunkStatusEnum getStatus() { @@ -336,7 +341,7 @@ public class Batch2WorkChunkEntity implements Serializable { .append("updateTime", myUpdateTime) .append("recordsProcessed", myRecordsProcessed) .append("targetStepId", myTargetStepId) - .append("serializedData", mySerializedData) + .append("serializedData", getSerializedData()) .append("status", myStatus) .append("errorMessage", myErrorMessage) .append("warningMessage", myWarningMessage) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java index 7f448fa2a62..6c2c61c42b6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java @@ -32,6 +32,7 @@ import jakarta.persistence.Lob; import jakarta.persistence.ManyToOne; import jakarta.persistence.SequenceGenerator; import jakarta.persistence.Table; +import org.hibernate.Length; import java.io.Serializable; import java.nio.charset.StandardCharsets; @@ -66,10 +67,13 @@ public class BulkImportJobFileEntity implements Serializable { @Column(name = "FILE_DESCRIPTION", nullable = true, length = MAX_DESCRIPTION_LENGTH) private String myFileDescription; - @Lob - @Column(name = "JOB_CONTENTS", 
nullable = false) + @Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later + @Column(name = "JOB_CONTENTS", nullable = true) private byte[] myContents; + @Column(name = "JOB_CONTENTS_VC", nullable = true, length = Length.LONG32) + private String myContentsVc; + @Column(name = "TENANT_NAME", nullable = true, length = PartitionEntity.MAX_NAME_LENGTH) private String myTenantName; @@ -98,11 +102,16 @@ public class BulkImportJobFileEntity implements Serializable { } public String getContents() { - return new String(myContents, StandardCharsets.UTF_8); + if (myContentsVc != null) { + return myContentsVc; + } else { + return new String(myContents, StandardCharsets.UTF_8); + } } public void setContents(String theContents) { - myContents = theContents.getBytes(StandardCharsets.UTF_8); + myContentsVc = theContents; + myContents = null; } public BulkImportJobFileJson toJson() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java index 0cfb437cfea..4237c56a14f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java @@ -50,6 +50,7 @@ import jakarta.persistence.UniqueConstraint; import jakarta.persistence.Version; import org.apache.commons.lang3.SerializationUtils; import org.apache.commons.lang3.builder.ToStringBuilder; +import org.hibernate.Length; import org.hibernate.annotations.JdbcTypeCode; import org.hibernate.annotations.OptimisticLock; import org.hibernate.type.SqlTypes; @@ -141,14 +142,21 @@ public class Search implements ICachedSearchDetails, Serializable { @Column(name = "RESOURCE_TYPE", length = 200, nullable = true) private String myResourceType; + /** * Note that this field may have the request partition IDs prepended to it */ - @Lob() + @Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later @Basic(fetch = 
FetchType.LAZY) @Column(name = "SEARCH_QUERY_STRING", nullable = true, updatable = false, length = MAX_SEARCH_QUERY_STRING) private String mySearchQueryString; + /** + * Note that this field may have the request partition IDs prepended to it + */ + @Column(name = "SEARCH_QUERY_STRING_VC", nullable = true, length = Length.LONG32) + private String mySearchQueryStringVc; + @Column(name = "SEARCH_QUERY_STRING_HASH", nullable = true, updatable = false) private Integer mySearchQueryStringHash; @@ -172,10 +180,13 @@ public class Search implements ICachedSearchDetails, Serializable { @Column(name = "OPTLOCK_VERSION", nullable = true) private Integer myVersion; - @Lob + @Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later @Column(name = "SEARCH_PARAM_MAP", nullable = true) private byte[] mySearchParameterMap; + @Column(name = "SEARCH_PARAM_MAP_BIN", nullable = true, length = Length.LONG32) + private byte[] mySearchParameterMapBin; + @Transient private transient SearchParameterMap mySearchParameterMapTransient; @@ -350,7 +361,7 @@ public class Search implements ICachedSearchDetails, Serializable { * Note that this field may have the request partition IDs prepended to it */ public String getSearchQueryString() { - return mySearchQueryString; + return mySearchQueryStringVc != null ? 
mySearchQueryStringVc : mySearchQueryString; } public void setSearchQueryString(String theSearchQueryString, RequestPartitionId theRequestPartitionId) { @@ -362,12 +373,13 @@ public class Search implements ICachedSearchDetails, Serializable { // We want this field to always have a wide distribution of values in order // to avoid optimizers avoiding using it if it has lots of nulls, so in the // case of null, just put a value that will never be hit - mySearchQueryString = UUID.randomUUID().toString(); + mySearchQueryStringVc = UUID.randomUUID().toString(); } else { - mySearchQueryString = searchQueryString; + mySearchQueryStringVc = searchQueryString; } - mySearchQueryStringHash = mySearchQueryString.hashCode(); + mySearchQueryString = null; + mySearchQueryStringHash = mySearchQueryStringVc.hashCode(); } public SearchTypeEnum getSearchType() { @@ -466,8 +478,12 @@ public class Search implements ICachedSearchDetails, Serializable { return Optional.of(mySearchParameterMapTransient); } SearchParameterMap searchParameterMap = null; - if (mySearchParameterMap != null) { - searchParameterMap = SerializationUtils.deserialize(mySearchParameterMap); + byte[] searchParameterMapSerialized = mySearchParameterMapBin; + if (searchParameterMapSerialized == null) { + searchParameterMapSerialized = mySearchParameterMap; + } + if (searchParameterMapSerialized != null) { + searchParameterMap = SerializationUtils.deserialize(searchParameterMapSerialized); mySearchParameterMapTransient = searchParameterMap; } return Optional.ofNullable(searchParameterMap); @@ -475,7 +491,8 @@ public class Search implements ICachedSearchDetails, Serializable { public void setSearchParameterMap(SearchParameterMap theSearchParameterMap) { mySearchParameterMapTransient = theSearchParameterMap; - mySearchParameterMap = SerializationUtils.serialize(theSearchParameterMap); + mySearchParameterMapBin = SerializationUtils.serialize(theSearchParameterMap); + mySearchParameterMap = null; } @Override diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index 69f491e6d30..fd0ef2bcf8c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -117,6 +117,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { init640_after_20230126(); init660(); init680(); + init680_Part2(); init700(); } @@ -226,6 +227,44 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { .withColumns("INSTANCE_ID", "TGT_STEP_ID", "STAT", "SEQ", "ID"); } + private void init680_Part2() { + Builder version = forVersion(VersionEnum.V6_8_0); + + // Add additional LOB migration columns + version.onTable("BT2_JOB_INSTANCE") + .addColumn("20240227.1", "REPORT_VC") + .nullable() + .type(ColumnTypeEnum.TEXT); + version.onTable("BT2_JOB_INSTANCE") + .addColumn("20240227.2", "PARAMS_JSON_VC") + .nullable() + .type(ColumnTypeEnum.TEXT); + + version.onTable("BT2_WORK_CHUNK") + .addColumn("20240227.3", "CHUNK_DATA_VC") + .nullable() + .type(ColumnTypeEnum.TEXT); + + version.onTable("HFJ_SEARCH") + .addColumn("20240227.4", "SEARCH_QUERY_STRING_VC") + .nullable() + .type(ColumnTypeEnum.TEXT); + version.onTable("HFJ_SEARCH") + .addColumn("20240227.5", "SEARCH_PARAM_MAP_BIN") + .nullable() + .type(ColumnTypeEnum.BINARY); + + version.onTable("HFJ_BLK_IMPORT_JOBFILE") + .addColumn("20240227.6", "JOB_CONTENTS_VC") + .nullable() + .type(ColumnTypeEnum.TEXT); + + version.onTable("HFJ_BLK_IMPORT_JOBFILE") + .modifyColumn("20240227.7", "JOB_CONTENTS") + .nullable() + .withType(ColumnTypeEnum.BLOB); + } + protected void init680() { Builder version = forVersion(VersionEnum.V6_8_0); // HAPI-FHIR #4801 - Add New Index On HFJ_RESOURCE diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index 0874b2b41b5..dc34125641f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -843,6 +843,11 @@ public class SearchBuilder implements ISearchBuilder { RuntimeSearchParam param = null; + if (param == null) { + // do we have a composition param defined for the whole chain? + param = mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName()); + } + /* * If we have a sort like _sort=subject.name and we have an * uplifted refchain for that combination we can do it more efficiently @@ -851,7 +856,7 @@ public class SearchBuilder implements ISearchBuilder { * to "name" in this example) so that we know what datatype it is. */ String paramName = theSort.getParamName(); - if (myStorageSettings.isIndexOnUpliftedRefchains()) { + if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) { String[] chains = StringUtils.split(paramName, '.'); if (chains.length == 2) { diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml index ef81aae7867..e969a0be649 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-hfql/pom.xml b/hapi-fhir-jpaserver-hfql/pom.xml index 6cc3592d079..6de7d563dd7 100644 --- a/hapi-fhir-jpaserver-hfql/pom.xml +++ b/hapi-fhir-jpaserver-hfql/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-ips/pom.xml 
b/hapi-fhir-jpaserver-ips/pom.xml index 128345bf2e6..dbb537202f7 100644 --- a/hapi-fhir-jpaserver-ips/pom.xml +++ b/hapi-fhir-jpaserver-ips/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index 7e9d8ac818b..6441a09562d 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index 7e1922e2ac6..b44506f53ab 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 9bfc11cd691..751edb8b63f 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index 74386ca62b1..4e05e2edf03 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/SubscriptionWebsocketHandler.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/SubscriptionWebsocketHandler.java index 8ae139f786d..519d0cd06c4 100644 --- 
a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/SubscriptionWebsocketHandler.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/SubscriptionWebsocketHandler.java @@ -205,6 +205,9 @@ public class SubscriptionWebsocketHandler extends TextWebSocketHandler implement * @return The payload */ private Optional getPayloadByContent(ResourceDeliveryMessage msg) { + if (msg.getSubscription().getContent() == null) { + return Optional.empty(); + } switch (msg.getSubscription().getContent()) { case IDONLY: return Optional.of(msg.getPayloadId()); diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java index 8f3964dca0d..4b571c7bd33 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java @@ -25,8 +25,10 @@ import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher; import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionStrategyEvaluator; import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionQueryValidator; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Lazy; +@Configuration public class SubscriptionTopicConfig { @Bean SubscriptionTopicMatchingSubscriber subscriptionTopicMatchingSubscriber(FhirContext theFhirContext) { diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml index 62466aa6bc2..d51a8940bc0 100644 --- a/hapi-fhir-jpaserver-test-dstu2/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 
7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml index 4a1b02a3fa5..7fa9c4fbb06 100644 --- a/hapi-fhir-jpaserver-test-dstu3/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml index e25cde6dbc9..8bec81531a4 100644 --- a/hapi-fhir-jpaserver-test-r4/pom.xml +++ b/hapi-fhir-jpaserver-test-r4/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java index dd5a48fea49..1df9bd5e48e 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.bulk; import ca.uhn.fhir.batch2.api.IJobCoordinator; +import ca.uhn.fhir.batch2.api.IJobPersistence; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; import ca.uhn.fhir.batch2.model.StatusEnum; @@ -10,6 +11,9 @@ import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.model.BulkExportJobResults; import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse; +import ca.uhn.fhir.jpa.batch2.JpaJobPersistenceImpl; +import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository; +import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test; import ca.uhn.fhir.rest.api.Constants; @@ -21,11 +25,13 @@ import 
ca.uhn.fhir.rest.client.apache.ResourceEntity; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; import ca.uhn.fhir.test.utilities.HttpClientExtension; +import ca.uhn.fhir.test.utilities.ProxyUtil; import ca.uhn.fhir.util.Batch2JobDefinitionConstants; import ca.uhn.fhir.util.JsonUtil; import com.google.common.collect.Sets; import jakarta.annotation.Nonnull; import org.apache.commons.io.LineIterator; +import org.apache.commons.lang3.StringUtils; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; @@ -66,6 +72,7 @@ import org.mockito.Spy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.PageRequest; import java.io.IOException; import java.io.StringReader; @@ -80,6 +87,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; +import static ca.uhn.fhir.batch2.jobs.export.BulkExportAppCtx.CREATE_REPORT_STEP; +import static ca.uhn.fhir.batch2.jobs.export.BulkExportAppCtx.WRITE_TO_BINARIES; import static ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TagsInlineTest.createSearchParameterForInlineSecurity; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.awaitility.Awaitility.await; @@ -100,17 +109,25 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test { @Autowired private IJobCoordinator myJobCoordinator; + @Autowired + private IBatch2WorkChunkRepository myWorkChunkRepository; + @Autowired + private IJobPersistence myJobPersistence; + private JpaJobPersistenceImpl myJobPersistenceImpl; @AfterEach void afterEach() { myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.DISABLED); - myStorageSettings.setTagStorageMode(new JpaStorageSettings().getTagStorageMode()); - 
myStorageSettings.setResourceClientIdStrategy(new JpaStorageSettings().getResourceClientIdStrategy()); + JpaStorageSettings defaults = new JpaStorageSettings(); + myStorageSettings.setTagStorageMode(defaults.getTagStorageMode()); + myStorageSettings.setResourceClientIdStrategy(defaults.getResourceClientIdStrategy()); + myStorageSettings.setBulkExportFileMaximumSize(defaults.getBulkExportFileMaximumSize()); } @BeforeEach public void beforeEach() { myStorageSettings.setJobFastTrackingEnabled(false); + myJobPersistenceImpl = ProxyUtil.getSingletonTarget(myJobPersistence, JpaJobPersistenceImpl.class); } @Spy diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesAndWriteBinaryStepJpaTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesAndWriteBinaryStepJpaTest.java new file mode 100644 index 00000000000..3df3cc65040 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesAndWriteBinaryStepJpaTest.java @@ -0,0 +1,177 @@ +package ca.uhn.fhir.jpa.bulk.export; + +import ca.uhn.fhir.batch2.api.IJobDataSink; +import ca.uhn.fhir.batch2.api.StepExecutionDetails; +import ca.uhn.fhir.batch2.jobs.export.ExpandResourceAndWriteBinaryStep; +import ca.uhn.fhir.batch2.jobs.export.ExpandResourcesStep; +import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId; +import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList; +import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList; +import ca.uhn.fhir.batch2.jobs.models.BatchResourceId; +import ca.uhn.fhir.batch2.model.JobInstance; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.test.BaseJpaR4Test; +import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; +import jakarta.persistence.Id; +import org.apache.commons.lang3.StringUtils; +import org.hl7.fhir.r4.model.Binary; +import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.Patient; 
+import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.springframework.beans.factory.annotation.Autowired; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class ExpandResourcesAndWriteBinaryStepJpaTest extends BaseJpaR4Test { + + @Autowired + private ExpandResourceAndWriteBinaryStep myExpandResourcesStep; + + @Mock + private IJobDataSink mySink; + @Captor + private ArgumentCaptor myWorkChunkCaptor; + + @Override + public void afterCleanupDao() { + super.afterCleanupDao(); + + JpaStorageSettings defaults = new JpaStorageSettings(); + myStorageSettings.setBulkExportFileMaximumSize(defaults.getBulkExportFileMaximumSize()); + } + + @Test + public void testMaximumChunkSize() { + /* + * We're going to set the maximum file size to 3000, and create some resources with + * a name that is 1000 chars long. With the other boilerplate text in a resource that + * will put the resource length at just over 1000 chars, meaning that any given + * chunk or file should have only 2 resources in it. 
+ */ + int testResourceSize = 1000; + int maxFileSize = 3 * testResourceSize; + myStorageSettings.setBulkExportFileMaximumSize(maxFileSize); + + List expectedIds = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + Patient p = new Patient(); + p.addName().setFamily(StringUtils.leftPad("", testResourceSize, 'A')); + String id = myPatientDao.create(p, mySrd).getId().getIdPart(); + expectedIds.add(new BatchResourceId().setResourceType("Patient").setId(id)); + } + Collections.sort(expectedIds); + + ResourceIdList resourceList = new ResourceIdList(); + resourceList.setResourceType("Patient"); + resourceList.setIds(expectedIds); + + BulkExportJobParameters params = new BulkExportJobParameters(); + JobInstance jobInstance = new JobInstance(); + String chunkId = "ABC"; + + StepExecutionDetails details = new StepExecutionDetails<>(params, resourceList, jobInstance, chunkId); + + // Test + + myExpandResourcesStep.run(details, mySink); + + // Verify + verify(mySink, atLeast(1)).accept(myWorkChunkCaptor.capture()); + List actualResourceIdList = new ArrayList<>(); + for (BulkExportBinaryFileId next : myWorkChunkCaptor.getAllValues()) { + + Binary nextBinary = myBinaryDao.read(new IdType(next.getBinaryId()), mySrd); + String nextNdJsonString = new String(nextBinary.getContent(), StandardCharsets.UTF_8); + + // This is the most important check here + assertThat(nextNdJsonString.length(), lessThanOrEqualTo(maxFileSize)); + + Arrays.stream(nextNdJsonString.split("\\n")) + .filter(StringUtils::isNotBlank) + .map(t->myFhirContext.newJsonParser().parseResource(t)) + .map(t->new BatchResourceId().setResourceType(t.getIdElement().getResourceType()).setId(t.getIdElement().getIdPart())) + .forEach(actualResourceIdList::add); + + } + Collections.sort(actualResourceIdList); + assertEquals(expectedIds, actualResourceIdList); + } + + @Test + public void testMaximumChunkSize_SingleFileExceedsMaximum() { + /* + * We're going to set the maximum file size to 1000, and create some resources + * 
with a name that is 1500 chars long. In this case, we'll exceed the + * configured maximum, so it should be one output file per resourcs. + */ + int testResourceSize = 1500; + int maxFileSize = 1000; + myStorageSettings.setBulkExportFileMaximumSize(maxFileSize); + + List expectedIds = new ArrayList<>(); + int numberOfResources = 10; + for (int i = 0; i < numberOfResources; i++) { + Patient p = new Patient(); + p.addName().setFamily(StringUtils.leftPad("", testResourceSize, 'A')); + String id = myPatientDao.create(p, mySrd).getId().getIdPart(); + expectedIds.add(new BatchResourceId().setResourceType("Patient").setId(id)); + } + Collections.sort(expectedIds); + + ResourceIdList resourceList = new ResourceIdList(); + resourceList.setResourceType("Patient"); + resourceList.setIds(expectedIds); + + BulkExportJobParameters params = new BulkExportJobParameters(); + JobInstance jobInstance = new JobInstance(); + String chunkId = "ABC"; + + StepExecutionDetails details = new StepExecutionDetails<>(params, resourceList, jobInstance, chunkId); + + // Test + + myExpandResourcesStep.run(details, mySink); + + // Verify + + // This is the most important check - we should have one file per resource + verify(mySink, times(numberOfResources)).accept(myWorkChunkCaptor.capture()); + + List actualResourceIdList = new ArrayList<>(); + for (BulkExportBinaryFileId next : myWorkChunkCaptor.getAllValues()) { + + Binary nextBinary = myBinaryDao.read(new IdType(next.getBinaryId()), mySrd); + String nextNdJsonString = new String(nextBinary.getContent(), StandardCharsets.UTF_8); + + Arrays.stream(nextNdJsonString.split("\\n")) + .filter(StringUtils::isNotBlank) + .map(t->myFhirContext.newJsonParser().parseResource(t)) + .map(t->new BatchResourceId().setResourceType(t.getIdElement().getResourceType()).setId(t.getIdElement().getIdPart())) + .forEach(actualResourceIdList::add); + + } + Collections.sort(actualResourceIdList); + assertEquals(expectedIds, actualResourceIdList); + } + +} diff --git 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesStepJpaTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesStepJpaTest.java index 2b4257efe84..b77cd82fbbc 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesStepJpaTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/ExpandResourcesStepJpaTest.java @@ -3,13 +3,14 @@ package ca.uhn.fhir.jpa.bulk.export; import ca.uhn.fhir.batch2.api.IJobDataSink; import ca.uhn.fhir.batch2.api.StepExecutionDetails; import ca.uhn.fhir.batch2.jobs.export.ExpandResourcesStep; -import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList; import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList; import ca.uhn.fhir.batch2.jobs.models.BatchResourceId; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; +import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; +import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -20,13 +21,17 @@ import org.mockito.Mock; import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.stream.IntStream; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -44,7 +49,9 @@ public class 
ExpandResourcesStepJpaTest extends BaseJpaR4Test { public void afterCleanupDao() { super.afterCleanupDao(); - myStorageSettings.setTagStorageMode(new JpaStorageSettings().getTagStorageMode()); + JpaStorageSettings defaults = new JpaStorageSettings(); + myStorageSettings.setTagStorageMode(defaults.getTagStorageMode()); + myStorageSettings.setBulkExportFileMaximumSize(defaults.getBulkExportFileMaximumSize()); } /** @@ -194,4 +201,60 @@ public class ExpandResourcesStepJpaTest extends BaseJpaR4Test { } + @Test + public void testMaximumChunkSize() { + /* + * We're going to set the maximum file size to 3000, and create some resources with + * a name that is 1000 chars long. With the other boilerplate text in a resource that + * will put the resource length at just over 1000 chars, meaning that any given + * chunk or file should have only 2 resources in it. + */ + int testResourceSize = 1000; + int maxFileSize = 3 * testResourceSize; + myStorageSettings.setBulkExportFileMaximumSize(maxFileSize); + + List expectedIds = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + Patient p = new Patient(); + p.addName().setFamily(StringUtils.leftPad("", testResourceSize, 'A')); + String id = myPatientDao.create(p, mySrd).getId().getIdPart(); + expectedIds.add(new BatchResourceId().setResourceType("Patient").setId(id)); + } + Collections.sort(expectedIds); + + ResourceIdList resourceList = new ResourceIdList(); + resourceList.setResourceType("Patient"); + resourceList.setIds(expectedIds); + + BulkExportJobParameters params = new BulkExportJobParameters(); + JobInstance jobInstance = new JobInstance(); + String chunkId = "ABC"; + + StepExecutionDetails details = new StepExecutionDetails<>(params, resourceList, jobInstance, chunkId); + + // Test + + myCaptureQueriesListener.clear(); + myExpandResourcesStep.run(details, mySink); + + // Verify + verify(mySink, atLeast(1)).accept(myWorkChunkCaptor.capture()); + List actualResourceIdList = new ArrayList<>(); + for (var next : 
myWorkChunkCaptor.getAllValues()) { + int nextSize = String.join("\n", next.getStringifiedResources()).length(); + ourLog.info("Next size: {}", nextSize); + assertThat(nextSize, lessThanOrEqualTo(maxFileSize)); + next.getStringifiedResources().stream() + .filter(StringUtils::isNotBlank) + .map(t->myFhirContext.newJsonParser().parseResource(t)) + .map(t->new BatchResourceId().setResourceType(t.getIdElement().getResourceType()).setId(t.getIdElement().getIdPart())) + .forEach(actualResourceIdList::add); + } + + Collections.sort(actualResourceIdList); + assertEquals(expectedIds, actualResourceIdList); + + + } + } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/FetchResourceIdsStepJpaTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/FetchResourceIdsStepJpaTest.java index e6e6fd08108..c27568b5ff1 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/FetchResourceIdsStepJpaTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/export/FetchResourceIdsStepJpaTest.java @@ -10,14 +10,18 @@ import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TagsTest; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; +import ca.uhn.fhir.util.JsonUtil; import org.hl7.fhir.r4.model.DateTimeType; import org.hl7.fhir.r4.model.OrganizationAffiliation; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.Mock; +import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -38,7 +42,9 @@ public class FetchResourceIdsStepJpaTest extends BaseJpaR4Test { 
public void afterCleanupDao() { super.afterCleanupDao(); - myStorageSettings.setTagStorageMode(new JpaStorageSettings().getTagStorageMode()); + JpaStorageSettings defaults = new JpaStorageSettings(); + myStorageSettings.setTagStorageMode(defaults.getTagStorageMode()); + myStorageSettings.setBulkExportFileMaximumSize(defaults.getBulkExportFileMaximumSize()); } @Test @@ -74,6 +80,39 @@ public class FetchResourceIdsStepJpaTest extends BaseJpaR4Test { assertEquals(10, idList.getIds().size()); } + @Test + public void testChunkMaximumSize() { + myStorageSettings.setBulkExportFileMaximumSize(500); + for (int i = 0; i < 100; i++) { + OrganizationAffiliation orgAff = new OrganizationAffiliation(); + orgAff.setActive(true); + myOrganizationAffiliationDao.create(orgAff, mySrd); + } + + BulkExportJobParameters params = new BulkExportJobParameters(); + params.setResourceTypes(List.of("OrganizationAffiliation")); + VoidModel data = new VoidModel(); + JobInstance instance = new JobInstance(); + instance.setInstanceId("instance-id"); + String chunkId = "chunk-id"; + StepExecutionDetails executionDetails = new StepExecutionDetails<>(params, data, instance, chunkId); + + // Test + myFetchResourceIdsStep.run(executionDetails, mySink); + + // Verify + verify(mySink, Mockito.atLeast(1)).accept(myResourceIdListCaptor.capture()); + List idLists = myResourceIdListCaptor.getAllValues(); + for (var next : idLists) { + String serialized = JsonUtil.serialize(next, false); + + // Note that the 600 is a bit higher than the configured maximum of 500 above, + // because our chunk size estimate is not totally accurate, but it's not + // going to be way off, less than 100 regardless of how big the maximum is + assertThat(serialized, serialized.length(), lessThanOrEqualTo(600)); + } + + } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/ChainingR4SearchTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/ChainingR4SearchTest.java index 
7c75a50ae87..8f235367610 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/ChainingR4SearchTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/ChainingR4SearchTest.java @@ -5,13 +5,10 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; -import ca.uhn.fhir.jpa.searchparam.ResourceSearch; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.jpa.util.SqlQuery; import ca.uhn.fhir.parser.StrictErrorHandler; -import ca.uhn.fhir.rest.api.server.IBundleProvider; -import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.AuditEvent; @@ -20,7 +17,6 @@ import org.hl7.fhir.r4.model.CodeableConcept; import org.hl7.fhir.r4.model.Coding; import org.hl7.fhir.r4.model.Composition; import org.hl7.fhir.r4.model.Device; -import org.hl7.fhir.r4.model.DomainResource; import org.hl7.fhir.r4.model.Encounter; import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.IdType; @@ -41,14 +37,13 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; import org.springframework.beans.factory.annotation.Autowired; -import java.io.IOException; import java.sql.Date; -import java.util.ArrayList; import java.util.List; import static org.apache.commons.lang3.StringUtils.countMatches; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasSize; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.fail; @@ -117,7 +112,7 @@ public class ChainingR4SearchTest 
extends BaseJpaR4Test { @Test - public void testShouldResolveATwoLinkChainWithStandAloneResourcesWithoutContainedResourceIndexing() throws Exception { + public void testShouldResolveATwoLinkChainWithStandAloneResourcesWithoutContainedResourceIndexing() { // setup IIdType oid1; @@ -141,7 +136,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.name=Smith"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -149,7 +144,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveATwoLinkChainWithStandAloneResources() throws Exception { + public void testShouldResolveATwoLinkChainWithStandAloneResources() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -175,7 +170,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.name=Smith"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -183,7 +178,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveATwoLinkChainWithStandAloneResources_CommonReference() throws Exception { + public void testShouldResolveATwoLinkChainWithStandAloneResources_CommonReference() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -218,7 +213,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueries(); // validate @@ -227,7 +222,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveATwoLinkChainWithStandAloneResources_CompoundReference() throws 
Exception { + public void testShouldResolveATwoLinkChainWithStandAloneResources_CompoundReference() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -265,7 +260,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url, myAuditEventDao); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueries(); // validate @@ -274,7 +269,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveATwoLinkChainWithContainedResources_CompoundReference() throws Exception { + public void testShouldResolveATwoLinkChainWithContainedResources_CompoundReference() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -313,7 +308,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url, myAuditEventDao); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueries(); // validate @@ -322,7 +317,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveATwoLinkChainWithAContainedResource() throws Exception { + public void testShouldResolveATwoLinkChainWithAContainedResource() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -355,7 +350,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.name=Smith"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -363,7 +358,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldNotResolveATwoLinkChainWithAContainedResourceWhenContainedResourceIndexingIsTurnedOff() throws Exception { + public void 
testShouldNotResolveATwoLinkChainWithAContainedResourceWhenContainedResourceIndexingIsTurnedOff() { // setup IIdType oid1; @@ -378,24 +373,24 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { obs.setValue(new StringType("Test")); obs.getSubject().setReference("#pat"); - oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless(); + myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless(); - // Create a dummy record so that an unconstrained query doesn't pass the test due to returning the only record + // Create a dummy record so that an unconstrained query doesn't pass the test due to returning the only record myObservationDao.create(new Observation(), mySrd); } String url = "/Observation?subject.name=Smith"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(0L, oids.size()); } @Test - @Disabled - public void testShouldResolveATwoLinkChainWithQualifiersWithAContainedResource() throws Exception { + @Disabled("Known limitation") + public void testShouldResolveATwoLinkChainWithQualifiersWithAContainedResource() { // TODO: This test fails because of a known limitation in qualified searches over contained resources. // Type information for intermediate resources in the chain is not being retained in the indexes. 
// setup @@ -435,7 +430,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject:Patient.name=Smith"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -443,7 +438,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveATwoLinkChainToAContainedReference() throws Exception { + public void testShouldResolveATwoLinkChainToAContainedReference() { // Adding support for this case in SMILE-3151 // setup @@ -477,7 +472,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.organization=" + orgId.getValueAsString(); // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -485,7 +480,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveATwoLinkChainToAStandAloneReference() throws Exception { + public void testShouldResolveATwoLinkChainToAStandAloneReference() { // Adding support for this case in SMILE-3151 // setup @@ -519,7 +514,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.organization=" + orgId.getValueAsString(); // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -527,7 +522,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveATwoLinkChainWithAContainedResource_CommonReference() throws Exception { + public void testShouldResolveATwoLinkChainWithAContainedResource_CommonReference() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -558,7 +553,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute 
myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueries(); // validate @@ -567,7 +562,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAloneWithoutContainedResourceIndexing() throws Exception { + public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAloneWithoutContainedResourceIndexing() { // setup IIdType oid1; @@ -611,7 +606,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.organization.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -619,7 +614,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAlone() throws Exception { + public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAlone() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -665,7 +660,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.organization.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -673,7 +668,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain() throws Exception { + public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain() { // This is the case that is most relevant to SMILE-2899 // setup @@ -706,7 +701,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.organization.name=HealthCo"; // execute 
- List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -714,7 +709,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain_CommonReference() throws Exception { + public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain_CommonReference() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -750,7 +745,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueries(); // validate @@ -759,7 +754,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain() throws Exception { + public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain() { // Adding support for this case in SMILE-3151 // setup @@ -792,7 +787,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.organization.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -800,7 +795,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain_CommonReference() throws Exception { + public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain_CommonReference() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -835,7 +830,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute 
myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueries(); // validate @@ -844,7 +839,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldNotResolveAThreeLinkChainWithAllContainedResourcesWhenRecursiveContainedIndexesAreDisabled() throws Exception { + public void testShouldNotResolveAThreeLinkChainWithAllContainedResourcesWhenRecursiveContainedIndexesAreDisabled() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -867,23 +862,23 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { obs.getCode().setText("Observation 1"); obs.getSubject().setReference("#pat"); - oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless(); + myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless(); - // Create a dummy record so that an unconstrained query doesn't pass the test due to returning the only record + // Create a dummy record so that an unconstrained query doesn't pass the test due to returning the only record myObservationDao.create(new Observation(), mySrd); } String url = "/Observation?subject.organization.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(0L, oids.size()); } @Test - public void testShouldResolveAThreeLinkChainWithAllContainedResources() throws Exception { + public void testShouldResolveAThreeLinkChainWithAllContainedResources() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -918,7 +913,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueries(); // validate @@ -927,7 +922,7 @@ public 
class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAThreeLinkChainWithQualifiersWhereAllResourcesStandAlone() throws Exception { + public void testShouldResolveAThreeLinkChainWithQualifiersWhereAllResourcesStandAlone() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -969,7 +964,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject:Patient.organization:Organization.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -977,7 +972,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheEndOfTheChain() throws Exception { + public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheEndOfTheChain() { // This is the case that is most relevant to SMILE-2899 // setup @@ -1025,7 +1020,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject:Patient.organization:Organization.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -1033,7 +1028,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheBeginning() throws Exception { + public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheBeginning() { // Adding support for this case in SMILE-3151 // setup @@ -1078,7 +1073,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); 
myCaptureQueriesListener.logSelectQueries(); // validate @@ -1087,8 +1082,8 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - @Disabled - public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheBeginning_NotDistinctSourcePaths() throws Exception { + @Disabled("Known limitation") + public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheBeginning_NotDistinctSourcePaths() { // TODO: This test fails because of a known limitation in qualified searches over contained resources. // Type information for intermediate resources in the chain is not being retained in the indexes. @@ -1136,7 +1131,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueries(); // validate @@ -1145,8 +1140,8 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - @Disabled - public void testShouldResolveAThreeLinkChainWithQualifiersWithAllContainedResources() throws Exception { + @Disabled("Known limitation") + public void testShouldResolveAThreeLinkChainWithQualifiersWithAllContainedResources() { // TODO: This test fails because of a known limitation in qualified searches over contained resources. // Type information for intermediate resources in the chain is not being retained in the indexes. 
@@ -1198,7 +1193,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueries(); // validate @@ -1207,7 +1202,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAFourLinkChainWhereAllResourcesStandAlone() throws Exception { + public void testShouldResolveAFourLinkChainWhereAllResourcesStandAlone() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -1244,7 +1239,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.organization.partof.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -1252,7 +1247,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAFourLinkChainWhereTheLastReferenceIsContained() throws Exception { + public void testShouldResolveAFourLinkChainWhereTheLastReferenceIsContained() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -1289,7 +1284,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.organization.partof.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -1297,7 +1292,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAFourLinkChainWhereTheLastTwoReferencesAreContained() throws Exception { + public void testShouldResolveAFourLinkChainWhereTheLastTwoReferencesAreContained() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -1334,7 +1329,7 @@ public class ChainingR4SearchTest extends 
BaseJpaR4Test { String url = "/Observation?subject.organization.partof.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -1342,7 +1337,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAFourLinkChainWithAContainedResourceInTheMiddle() throws Exception { + public void testShouldResolveAFourLinkChainWithAContainedResourceInTheMiddle() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -1384,7 +1379,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueries(); // validate @@ -1393,7 +1388,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAFourLinkChainWhereTheFirstTwoReferencesAreContained() throws Exception { + public void testShouldResolveAFourLinkChainWhereTheFirstTwoReferencesAreContained() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -1431,7 +1426,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { String url = "/Observation?subject.organization.partof.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -1439,7 +1434,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAFourLinkChainWhereTheFirstReferenceAndTheLastReferenceAreContained() throws Exception { + public void testShouldResolveAFourLinkChainWhereTheFirstReferenceAndTheLastReferenceAreContained() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -1476,7 +1471,7 @@ public class ChainingR4SearchTest extends 
BaseJpaR4Test { String url = "/Observation?subject.organization.partof.name=HealthCo"; // execute - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); // validate assertEquals(1L, oids.size()); @@ -1484,7 +1479,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldResolveAFourLinkChainWhereAllReferencesAreContained() throws Exception { + public void testShouldResolveAFourLinkChainWhereAllReferencesAreContained() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -1524,7 +1519,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { // execute myCaptureQueriesListener.clear(); - List oids = searchAndReturnUnqualifiedVersionlessIdValues(url); + List oids = myTestDaoSearch.searchForIds(url); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); // validate @@ -1533,7 +1528,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testShouldThrowAnExceptionForAFiveLinkChain() throws Exception { + public void testShouldThrowAnExceptionForAFiveLinkChain() { // setup myStorageSettings.setIndexOnContainedResources(true); @@ -1543,7 +1538,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { try { // execute - searchAndReturnUnqualifiedVersionlessIdValues(url); + myTestDaoSearch.searchForIds(url); fail("Expected an exception to be thrown"); } catch (InvalidRequestException e) { assertEquals(Msg.code(2007) + "The search chain subject.organization.partof.partof.name is too long. 
Only chains up to three references are supported.", e.getMessage()); @@ -1551,7 +1546,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { } @Test - public void testQueryStructure() throws Exception { + public void testQueryStructure() { // With indexing of contained resources turned off, we should not see UNION clauses in the query countUnionStatementsInGeneratedQuery("/Observation?patient.name=Smith", 0); @@ -1584,16 +1579,19 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { @ParameterizedTest @CsvSource({ // search url expected count - "/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=1980-01-01, 1", // correct identifier, correct birthdate - "/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-1, 1", // correct birthdate, correct identifier - "/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=2000-01-01, 0", // correct identifier, incorrect birthdate - "/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-1, 0", // incorrect birthdate, correct identifier - "/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=1980-01-01, 0", // incorrect identifier, correct birthdate - "/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-2, 0", // correct birthdate, incorrect identifier - "/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=2000-01-01, 0", // incorrect identifier, incorrect birthdate - "/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-2, 0", // incorrect birthdate, incorrect identifier + "/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=1980-01-01, 1, correct identifier correct birthdate", + "/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-1, 1, correct birthdate 
correct identifier", + "/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=2000-01-01, 0, correct identifier incorrect birthdate", + "/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-1, 0, incorrect birthdate correct identifier", + "/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=1980-01-01, 0, incorrect identifier correct birthdate", + "/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-2, 0, correct birthdate incorrect identifier", + "/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=2000-01-01, 0, incorrect identifier incorrect birthdate", + "/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-2, 0, incorrect birthdate incorrect identifier", + // try sort by composition sp + "/Bundle?composition.patient.identifier=system|value-1&_sort=composition.patient.birthdate, 1, correct identifier sort by birthdate", + }) - public void testMultipleChainedBundleCompositionSearchParameters(String theSearchUrl, int theExpectedCount) { + public void testMultipleChainedBundleCompositionSearchParameters(String theSearchUrl, int theExpectedCount, String theMessage) { createSearchParameter("bundle-composition-patient-birthdate", "composition.patient.birthdate", "Bundle", @@ -1610,8 +1608,8 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { createDocumentBundleWithPatientDetails("1980-01-01", "system", "value-1"); - SearchParameterMap params = myMatchUrlService.getResourceSearch(theSearchUrl).getSearchParameterMap().setLoadSynchronous(true); - assertSearchReturns(myBundleDao, params, theExpectedCount); + List ids = myTestDaoSearch.searchForIds(theSearchUrl); + assertThat(theMessage, ids, hasSize(theExpectedCount)); } private void createSearchParameter(String theId, String theCode, String theBase, String theExpression, Enumerations.SearchParamType 
theType) { @@ -1651,9 +1649,9 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { assertEquals(theExpectedCount, theDao.search(theSearchParams, mySrd).size()); } - private void countUnionStatementsInGeneratedQuery(String theUrl, int theExpectedNumberOfUnions) throws IOException { + private void countUnionStatementsInGeneratedQuery(String theUrl, int theExpectedNumberOfUnions) { myCaptureQueriesListener.clear(); - searchAndReturnUnqualifiedVersionlessIdValues(theUrl); + myTestDaoSearch.searchForIds(theUrl); List selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread(); assertEquals(1, selectQueries.size()); @@ -1661,18 +1659,4 @@ public class ChainingR4SearchTest extends BaseJpaR4Test { assertEquals(theExpectedNumberOfUnions, countMatches(sqlQuery, "union"), sqlQuery); } - private List searchAndReturnUnqualifiedVersionlessIdValues(String theUrl) throws IOException { - return searchAndReturnUnqualifiedVersionlessIdValues(theUrl, myObservationDao); - } - - private List searchAndReturnUnqualifiedVersionlessIdValues(String theUrl, IFhirResourceDao theObservationDao) { - List ids = new ArrayList<>(); - - ResourceSearch search = myMatchUrlService.getResourceSearch(theUrl); - SearchParameterMap map = search.getSearchParameterMap(); - map.setLoadSynchronous(true); - IBundleProvider result = theObservationDao.search(map); - return result.getAllResourceIds(); - } - } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SortTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SortTest.java index 23ab501a227..1300247608b 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SortTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SortTest.java @@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.dao.r4; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import 
ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; +import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.SortOrderEnum; import ca.uhn.fhir.rest.api.SortSpec; @@ -10,13 +11,18 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.param.DateParam; import ca.uhn.fhir.rest.param.DateRangeParam; import ca.uhn.fhir.rest.param.ParamPrefixEnum; -import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.TokenParam; import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r4.model.Bundle; +import org.hl7.fhir.r4.model.CodeableConcept; +import org.hl7.fhir.r4.model.Coding; +import org.hl7.fhir.r4.model.Composition; import org.hl7.fhir.r4.model.DateTimeType; +import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Patient; +import org.hl7.fhir.r4.model.SearchParameter; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; @@ -28,7 +34,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.jupiter.api.Assertions.assertEquals; -@SuppressWarnings({"unchecked", "deprecation"}) +@SuppressWarnings({"deprecation"}) public class FhirResourceDaoR4SortTest extends BaseJpaR4Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4SortTest.class); @@ -51,7 +57,7 @@ public class FhirResourceDaoR4SortTest extends BaseJpaR4Test { } @Test - public void testSortOnId() throws Exception { + public void testSortOnId() { // Numeric ID Patient p01 = new Patient(); p01.setActive(true); @@ -147,7 +153,7 @@ public class FhirResourceDaoR4SortTest extends BaseJpaR4Test { } @Test - public void testSortOnSearchParameterWhereAllResourcesHaveAValue() throws Exception { + public void 
testSortOnSearchParameterWhereAllResourcesHaveAValue() { Patient pBA = new Patient(); pBA.setId("BA"); pBA.setActive(true); @@ -348,20 +354,63 @@ public class FhirResourceDaoR4SortTest extends BaseJpaR4Test { SearchParameterMap map; List ids; - runInTransaction(() -> { - ourLog.info("Dates:\n * {}", myResourceIndexedSearchParamDateDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))); - }); - - map = new SearchParameterMap(); - map.setLoadSynchronous(true); - map.add(Observation.SP_SUBJECT, new ReferenceParam("Patient", "identifier", "PCA|PCA")); - map.setSort(new SortSpec("date").setOrder(SortOrderEnum.DESC)); - myCaptureQueriesListener.clear(); - ids = toUnqualifiedVersionlessIdValues(myObservationDao.search(map)); - ourLog.info("IDS: {}", ids); - myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertThat(ids.toString(), ids, contains("Observation/OBS2", "Observation/OBS1")); + runInTransaction(() -> ourLog.info("Dates:\n * {}", myResourceIndexedSearchParamDateDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")))); + myTestDaoSearch.assertSearchFinds( + "chained search", + "Observation?subject.identifier=PCA|PCA&_sort=-date", + "OBS2", "OBS1" + ); } + /** + * Define a composition SP for document Bundles, and sort by it. + * The chain is referencing the Bundle contents. 
+ * @see https://smilecdr.com/docs/fhir_storage_relational/chained_searches_and_sorts.html#document-and-message-search-parameters + */ + @Test + void testSortByCompositionSP() { + // given + SearchParameter searchParameter = new SearchParameter(); + searchParameter.setId("bundle-composition-patient-birthdate"); + searchParameter.setCode("composition.patient.birthdate"); + searchParameter.setName("composition.patient.birthdate"); + searchParameter.setUrl("http://example.org/SearchParameter/bundle-composition-patient-birthdate"); + searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE); + searchParameter.addBase("Bundle"); + searchParameter.setType(Enumerations.SearchParamType.DATE); + searchParameter.setExpression("Bundle.entry.resource.ofType(Patient).birthDate"); + doUpdateResource(searchParameter); + + mySearchParamRegistry.forceRefresh(); + + Patient pat1 = buildResource("Patient", withId("pat1"), withBirthdate("2001-03-17")); + doUpdateResource(pat1); + Bundle pat1Bundle = buildCompositionBundle(pat1); + String pat1BundleId = doCreateResource(pat1Bundle).getIdPart(); + + Patient pat2 = buildResource("Patient", withId("pat2"), withBirthdate("2000-01-01")); + doUpdateResource(pat2); + Bundle pat2Bundle = buildCompositionBundle(pat2); + String pat2BundleId = doCreateResource(pat2Bundle).getIdPart(); + + // then + myTestDaoSearch.assertSearchFinds("sort by contained date", + "Bundle?_sort=composition.patient.birthdate", List.of(pat2BundleId, pat1BundleId)); + myTestDaoSearch.assertSearchFinds("reverse sort by contained date", + "Bundle?_sort=-composition.patient.birthdate", List.of(pat1BundleId, pat2BundleId)); + } + + private static Bundle buildCompositionBundle(Patient pat11) { + Bundle bundle = new Bundle(); + bundle.setType(Bundle.BundleType.DOCUMENT); + Composition composition = new Composition(); + composition.setType(new CodeableConcept().addCoding(new Coding().setCode("code").setSystem("http://example.org"))); + 
bundle.addEntry().setResource(composition); + composition.getSubject().setReference(pat11.getIdElement().getValue()); + bundle.addEntry().setResource(pat11); + return bundle; + } + + } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/patch/FhirPatchApplyR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/patch/FhirPatchApplyR4Test.java index 87693e434b6..49532624974 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/patch/FhirPatchApplyR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/patch/FhirPatchApplyR4Test.java @@ -12,6 +12,7 @@ import org.hl7.fhir.r4.model.BooleanType; import org.hl7.fhir.r4.model.CodeType; import org.hl7.fhir.r4.model.CodeableConcept; import org.hl7.fhir.r4.model.Coding; +import org.hl7.fhir.r4.model.DateTimeType; import org.hl7.fhir.r4.model.Extension; import org.hl7.fhir.r4.model.HumanName; import org.hl7.fhir.r4.model.Identifier; @@ -37,6 +38,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -531,6 +533,59 @@ public class FhirPatchApplyR4Test { assertThat(patient.getExtension().get(0).getValueAsPrimitive().getValueAsString(), is(equalTo("foo"))); } + @Test + public void testAddExtensionWithExtension() { + final String extensionUrl = "http://foo/fhir/extension/foo"; + final String innerExtensionUrl = "http://foo/fhir/extension/innerExtension"; + final String innerExtensionValue = "2021-07-24T13:23:30-04:00"; + + FhirPatch svc = new FhirPatch(ourCtx); + Patient patient = new Patient(); + + Parameters patch = new Parameters(); + Parameters.ParametersParameterComponent addOperation = createPatchAddOperation("Patient", "extension", null); + 
addOperation + .addPart() + .setName("value") + .addPart( + new Parameters.ParametersParameterComponent() + .setName("url") + .setValue(new UriType(extensionUrl)) + ) + .addPart( + new Parameters.ParametersParameterComponent() + .setName("extension") + .addPart( + new Parameters.ParametersParameterComponent() + .setName("url") + .setValue(new UriType(innerExtensionUrl)) + ) + .addPart( + new Parameters.ParametersParameterComponent() + .setName("value") + .setValue(new DateTimeType(innerExtensionValue)) + ) + ); + + patch.addParameter(addOperation); + + ourLog.info("Patch:\n{}", ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patch)); + + svc.apply(patient, patch); + ourLog.debug("Outcome:\n{}", ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient)); + + //Then: it adds the new extension correctly. + assertThat(patient.getExtension(), hasSize(1)); + Extension extension = patient.getExtension().get(0); + assertThat(extension.getUrl(), is(equalTo(extensionUrl))); + Extension innerExtension = extension.getExtensionFirstRep(); + + assertThat(innerExtension, notNullValue()); + assertThat(innerExtension.getUrl(), is(equalTo(innerExtensionUrl))); + assertThat(innerExtension.getValue().primitiveValue(), is(equalTo(innerExtensionValue))); + + } + private Parameters.ParametersParameterComponent createPatchAddOperation(String thePath, String theName, Type theValue) { return createPatchOperation("add", thePath, theName, theValue, null); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java index 59f5519c007..fe8fab14308 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java @@ -18,6 +18,7 @@ import 
ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.term.ZipCollectionBuilder; import ca.uhn.fhir.jpa.test.config.TestR4Config; +import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.model.api.StorageResponseCodeEnum; import ca.uhn.fhir.model.api.TemporalPrecisionEnum; @@ -2404,6 +2405,100 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { assertThat(idValues, hasSize(0)); } + + @ParameterizedTest + @CsvSource({ + "false,PatientWithServerGeneratedId1", + "true,PatientWithServerGeneratedId2" + }) + public void testHistoryOnInstanceWithServerGeneratedId(boolean theInvalidateCacheBeforeHistory, + String thePatientFamilyName) { + + Patient patient = new Patient(); + patient.addName().setFamily(thePatientFamilyName); + IIdType id = myClient.create().resource(patient).execute().getId().toVersionless(); + ourLog.info("Res ID: {}", id); + + final String expectedFullUrl = myServerBase + "/Patient/" + id.getIdPart(); + + if (theInvalidateCacheBeforeHistory) { + // the reason for this test parameterization to invalidate the cache is that + // when a resource is created/updated, its id mapping is cached for 1 minute so + // retrieving the history right after creating the resource will use the cached value. + // By invalidating the cache here and getting the history bundle again, + // we test the scenario where the id mapping needs to be read from the db, + // hence testing a different code path. 
+ myMemoryCacheService.invalidateCaches(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID); + } + + Bundle history = myClient.history().onInstance(id.getValue()).andReturnBundle(Bundle.class).execute(); + assertEquals(1, history.getEntry().size()); + BundleEntryComponent historyEntry0 = history.getEntry().get(0); + // validate entry.fullUrl + assertEquals(expectedFullUrl, historyEntry0.getFullUrl()); + //validate entry.request + assertEquals(HTTPVerb.POST, historyEntry0.getRequest().getMethod()); + assertEquals("Patient/" + id.getIdPart() + "/_history/1", historyEntry0.getRequest().getUrl()); + //validate entry.response + assertEquals("201 Created", historyEntry0.getResponse().getStatus()); + assertNotNull(historyEntry0.getResponse().getEtag()); + + //validate patient resource details in the entry + Patient historyEntry0Patient = (Patient) historyEntry0.getResource(); + assertEquals(id.withVersion("1").getValue(), historyEntry0Patient.getId()); + assertEquals(1, historyEntry0Patient.getName().size()); + assertEquals(thePatientFamilyName, historyEntry0Patient.getName().get(0).getFamily()); + + + } + + @ParameterizedTest + @CsvSource({ + "false,PatientWithForcedId1", + "true,PatientWithForcedId2" + }) + public void testHistoryOnInstanceWithForcedId(boolean theInvalidateCacheBeforeHistory, + String thePatientFamilyName) { + + final String patientForcedId = thePatientFamilyName + "-ForcedId"; + Patient patient = new Patient(); + patient.addName().setFamily(thePatientFamilyName); + patient.setId(patientForcedId); + IIdType id = myClient.update().resource(patient).execute().getId().toVersionless(); + ourLog.info("Res ID: {}", id); + assertEquals(patientForcedId, id.getIdPart()); + + final String expectedFullUrl = myServerBase + "/Patient/" + id.getIdPart(); + + if (theInvalidateCacheBeforeHistory) { + // the reason for this test parameterization to invalidate the cache is that + // when a resource is created/updated, its id mapping is cached for 1 minute so + // retrieving 
the history right after creating the resource will use the cached value. + // By invalidating the cache here and getting the history bundle again, + // we test the scenario where the id mapping needs to be read from the db, + // hence testing a different code path. + myMemoryCacheService.invalidateCaches(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID); + } + + Bundle history = myClient.history().onInstance(id.getValue()).andReturnBundle(Bundle.class).execute(); + assertEquals(1, history.getEntry().size()); + BundleEntryComponent historyEntry0 = history.getEntry().get(0); + // validate entry.fullUrl + assertEquals(expectedFullUrl, historyEntry0.getFullUrl()); + //validate entry.request + assertEquals(HTTPVerb.POST, historyEntry0.getRequest().getMethod()); + assertEquals("Patient/" + id.getIdPart() + "/_history/1", historyEntry0.getRequest().getUrl()); + //validate entry.response + assertEquals("201 Created", historyEntry0.getResponse().getStatus()); + assertNotNull(historyEntry0.getResponse().getEtag()); + + //validate patient resource details in the entry + Patient historyEntry0Patient = (Patient) historyEntry0.getResource(); + assertEquals(id.withVersion("1").getValue(), historyEntry0Patient.getId()); + assertEquals(1, historyEntry0Patient.getName().size()); + assertEquals(thePatientFamilyName, historyEntry0Patient.getName().get(0).getFamily()); + } + @Test public void testHistoryWithDeletedResource() { String methodName = "testHistoryWithDeletedResource"; diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ServerR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ServerR4Test.java index 75f3e142cc2..2a7abc1c9cc 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ServerR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ServerR4Test.java @@ -7,6 +7,7 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import 
ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.util.ExtensionConstants; import org.apache.commons.io.IOUtils; +import org.apache.http.Header; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.hl7.fhir.r4.model.CapabilityStatement; @@ -15,11 +16,15 @@ import org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResource import org.hl7.fhir.r4.model.Extension; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.springframework.beans.factory.annotation.Autowired; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.Arrays; import java.util.HashSet; +import java.util.List; import java.util.Set; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -141,4 +146,26 @@ public class ServerR4Test extends BaseResourceProviderR4Test { } + + @ParameterizedTest + @ValueSource(strings = {"x-request-id", "X-Request-Id", "X-Request-ID", "X-REQUEST-ID"}) + public void testXRequestIdHeaderRetainsCase(String theXRequestIdHeaderKey) throws Exception { + HttpGet get = new HttpGet(myServerBase + "/Patient"); + String xRequestIdHeaderValue = "abc123"; + get.addHeader(theXRequestIdHeaderKey, xRequestIdHeaderValue); + + try (CloseableHttpResponse response = ourHttpClient.execute(get)) { + assertEquals(200, response.getStatusLine().getStatusCode()); + + String responseContent = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); + ourLog.debug(responseContent); + + List
xRequestIdHeaders = Arrays.stream(response.getAllHeaders()) + .filter(header -> theXRequestIdHeaderKey.equals(header.getName())) + .toList(); + + assertEquals(1, xRequestIdHeaders.size()); + assertEquals(xRequestIdHeaderValue, xRequestIdHeaders.get(0).getValue()); + } + } } diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml index aa484cba918..77be64734fc 100644 --- a/hapi-fhir-jpaserver-test-r4b/pom.xml +++ b/hapi-fhir-jpaserver-test-r4b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4b/src/test/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceIdTest.java b/hapi-fhir-jpaserver-test-r4b/src/test/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceIdTest.java new file mode 100644 index 00000000000..fce96e883e9 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r4b/src/test/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceIdTest.java @@ -0,0 +1,20 @@ +package ca.uhn.fhir.batch2.jobs.models; + +import ca.uhn.fhir.util.JsonUtil; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +class BatchResourceIdTest { + + @Test + public void testEstimateSize() { + BatchResourceId id = new BatchResourceId(); + id.setId("12345"); + id.setResourceType("Patient"); + String serialized = JsonUtil.serialize(id, false); + assertEquals(serialized.length(), id.estimateSerializedSize(), serialized); + } + + +} diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index 728edb24df4..545c9c2fbb4 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/subscription/websocket/WebsocketWithSubscriptionIdR5Test.java 
b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/subscription/websocket/WebsocketWithSubscriptionIdR5Test.java index b87f9acb872..054e1cfd4fc 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/subscription/websocket/WebsocketWithSubscriptionIdR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/subscription/websocket/WebsocketWithSubscriptionIdR5Test.java @@ -59,6 +59,36 @@ public class WebsocketWithSubscriptionIdR5Test extends BaseSubscriptionsR5Test { myWebsocketClientExtension.afterEach(null); } + @Test + public void testSubscriptionMessagePayloadContentIsNull() { + // Given a subscription + Subscription subscription = new Subscription(); + subscription.setStatus(Enumerations.SubscriptionStatusCodes.ACTIVE); + subscription.setContent(null); + subscription.setTopic("Topic/123"); + subscription.getChannelType().setCode("websocket"); + MethodOutcome methodOutcome = myClient.create().resource(subscription).execute(); + String subscriptionId = methodOutcome.getId().getIdPart(); + + // When + myWebsocketClientExtension.bind(subscriptionId); + + // And + // Trigger resource creation + Patient patient = new Patient(); + patient.setActive(true); + myClient.create().resource(patient).execute(); + + // Then + List messages = myWebsocketClientExtension.getMessages(); + await().until(() -> !messages.isEmpty()); + + // Log it + ourLog.info("Messages: {}", messages); + + // Verify a ping message shall be returned + Assertions.assertTrue(messages.contains("ping " + subscriptionId)); + } @Test public void testSubscriptionMessagePayloadContentIsEmpty() { diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index eb5a1e4f920..d92fba3b123 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java index fc81b0ceeea..531291487e4 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java @@ -42,6 +42,7 @@ import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper; import ca.uhn.fhir.jpa.dao.GZipUtil; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; +import ca.uhn.fhir.jpa.dao.TestDaoSearch; import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository; import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository; import ca.uhn.fhir.jpa.dao.data.IForcedIdDao; @@ -552,6 +553,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil private IInterceptorService myInterceptorService; @Autowired(required = false) private MdmStorageInterceptor myMdmStorageInterceptor; + @Autowired + protected TestDaoSearch myTestDaoSearch; @RegisterExtension private final PreventDanglingInterceptorsExtension myPreventDanglingInterceptorsExtension = new PreventDanglingInterceptorsExtension(()-> myInterceptorRegistry); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR4Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR4Config.java index f3d1824713e..3bae2e3ad1f 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR4Config.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR4Config.java @@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl; import ca.uhn.fhir.jpa.config.PackageLoaderConfig; import ca.uhn.fhir.jpa.config.r4.JpaR4Config; import 
ca.uhn.fhir.jpa.config.util.HapiEntityManagerFactoryUtil; +import ca.uhn.fhir.jpa.dao.TestDaoSearch; import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect; import ca.uhn.fhir.jpa.searchparam.config.NicknameServiceConfig; import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; @@ -78,7 +79,8 @@ import static org.junit.jupiter.api.Assertions.fail; TestHSearchAddInConfig.DefaultLuceneHeap.class, JpaBatch2Config.class, Batch2JobsConfig.class, - NicknameServiceConfig.class + NicknameServiceConfig.class, + TestDaoSearch.Config.class }) public class TestR4Config { diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index 8e0155619ab..a38b2b8c988 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-server-cds-hooks/pom.xml b/hapi-fhir-server-cds-hooks/pom.xml index db6df4d50fa..4749239fb7f 100644 --- a/hapi-fhir-server-cds-hooks/pom.xml +++ b/hapi-fhir-server-cds-hooks/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/CdsCrServiceMethod.java b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/CdsCrServiceMethod.java index cf2b2facb99..09f098ca2e3 100644 --- a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/CdsCrServiceMethod.java +++ b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/CdsCrServiceMethod.java @@ -38,8 +38,7 @@ public class CdsCrServiceMethod extends BaseCdsCrMethod implements ICdsServiceMe @Override public boolean isAllowAutoFhirClientPrefetch() { - // The $apply operation will make FHIR requests for any data it needs - // directly against the fhirServer of the ServiceRequest. 
- return false; + // The $apply operation will NOT make FHIR requests for any data it needs. + return true; } } diff --git a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/CdsHooksContextBooter.java b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/CdsHooksContextBooter.java index 828aed8505a..acd9558d2d6 100644 --- a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/CdsHooksContextBooter.java +++ b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/CdsHooksContextBooter.java @@ -51,13 +51,13 @@ import java.util.stream.Collectors; * is complete so that other beans can use the stuff it creates. */ public class CdsHooksContextBooter { - private static final Logger ourLog = LoggerFactory.getLogger(CdsHooksContextBooter.class); - private static final String CDS_SERVICES_BEAN_NAME = "cdsServices"; - private Class myDefinitionsClass; - private AnnotationConfigApplicationContext myAppCtx; + protected static final Logger ourLog = LoggerFactory.getLogger(CdsHooksContextBooter.class); + protected static final String CDS_SERVICES_BEAN_NAME = "cdsServices"; + protected Class myDefinitionsClass; + protected AnnotationConfigApplicationContext myAppCtx; - private List myCdsServiceBeans = new ArrayList<>(); - private final CdsServiceCache myCdsServiceCache = new CdsServiceCache(); + protected List myCdsServiceBeans = new ArrayList<>(); + protected final CdsServiceCache myCdsServiceCache = new CdsServiceCache(); public void setDefinitionsClass(Class theDefinitionsClass) { myDefinitionsClass = theDefinitionsClass; @@ -70,7 +70,7 @@ public class CdsHooksContextBooter { return myCdsServiceCache; } - private void extractCdsServices(Object theServiceBean) { + protected void extractCdsServices(Object theServiceBean) { Method[] methods = theServiceBean.getClass().getMethods(); // Sort alphabetically so service list output is deterministic (to ensure GET /cds-services is idempotent). 
// This also simplifies testing :-) @@ -104,7 +104,7 @@ public class CdsHooksContextBooter { } } - String validateJson(String theExtension) { + protected String validateJson(String theExtension) { if (StringUtils.isEmpty(theExtension)) { return null; } diff --git a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceDstu3.java b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceDstu3.java index 13d2c220027..a8f71ffb3a1 100644 --- a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceDstu3.java +++ b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceDstu3.java @@ -33,7 +33,6 @@ import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseSuggestionJson; import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseSystemActionJson; import org.hl7.fhir.dstu3.model.Bundle; import org.hl7.fhir.dstu3.model.CarePlan; -import org.hl7.fhir.dstu3.model.Endpoint; import org.hl7.fhir.dstu3.model.Extension; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.ParameterDefinition; @@ -54,7 +53,6 @@ import java.util.List; import java.util.Map; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA; -import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA_ENDPOINT; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_ENCOUNTER; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PARAMETERS; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PRACTITIONER; @@ -113,22 +111,6 @@ public class CdsCrServiceDstu3 implements ICdsCrService { if (data.hasEntry()) { parameters.addParameter(part(APPLY_PARAMETER_DATA, data)); } - if (theJson.getFhirServer() != null) { - Endpoint endpoint = new Endpoint().setAddress(theJson.getFhirServer()); - if (theJson.getServiceRequestAuthorizationJson().getAccessToken() != 
null) { - String tokenType = getTokenType(theJson.getServiceRequestAuthorizationJson()); - endpoint.addHeader(String.format( - "Authorization: %s %s", - tokenType, theJson.getServiceRequestAuthorizationJson().getAccessToken())); - if (theJson.getServiceRequestAuthorizationJson().getSubject() != null) { - endpoint.addHeader(String.format( - "%s: %s", - myCdsConfigService.getCdsCrSettings().getClientIdHeaderName(), - theJson.getServiceRequestAuthorizationJson().getSubject())); - } - } - parameters.addParameter(part(APPLY_PARAMETER_DATA_ENDPOINT, endpoint)); - } return parameters; } diff --git a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceR4.java b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceR4.java index 8204d23adb7..81468f3d843 100644 --- a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceR4.java +++ b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceR4.java @@ -36,7 +36,6 @@ import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseSystemActionJson; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.CanonicalType; -import org.hl7.fhir.r4.model.Endpoint; import org.hl7.fhir.r4.model.Extension; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.ParameterDefinition; @@ -56,7 +55,6 @@ import java.util.Map; import java.util.stream.Collectors; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA; -import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA_ENDPOINT; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_ENCOUNTER; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PARAMETERS; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PRACTITIONER; @@ -115,22 +113,6 @@ public class CdsCrServiceR4 
implements ICdsCrService { if (data.hasEntry()) { parameters.addParameter(part(APPLY_PARAMETER_DATA, data)); } - if (theJson.getFhirServer() != null) { - Endpoint endpoint = new Endpoint().setAddress(theJson.getFhirServer()); - if (theJson.getServiceRequestAuthorizationJson().getAccessToken() != null) { - String tokenType = getTokenType(theJson.getServiceRequestAuthorizationJson()); - endpoint.addHeader(String.format( - "Authorization: %s %s", - tokenType, theJson.getServiceRequestAuthorizationJson().getAccessToken())); - if (theJson.getServiceRequestAuthorizationJson().getSubject() != null) { - endpoint.addHeader(String.format( - "%s: %s", - myCdsConfigService.getCdsCrSettings().getClientIdHeaderName(), - theJson.getServiceRequestAuthorizationJson().getSubject())); - } - } - parameters.addParameter(part(APPLY_PARAMETER_DATA_ENDPOINT, endpoint)); - } return parameters; } diff --git a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceR5.java b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceR5.java index 06859a428cb..0beb319f198 100644 --- a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceR5.java +++ b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/CdsCrServiceR5.java @@ -36,7 +36,6 @@ import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseSystemActionJson; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r5.model.Bundle; import org.hl7.fhir.r5.model.CanonicalType; -import org.hl7.fhir.r5.model.Endpoint; import org.hl7.fhir.r5.model.Extension; import org.hl7.fhir.r5.model.IdType; import org.hl7.fhir.r5.model.ParameterDefinition; @@ -56,7 +55,6 @@ import java.util.Map; import java.util.stream.Collectors; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA; -import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA_ENDPOINT; import static 
ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_ENCOUNTER; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PARAMETERS; import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PRACTITIONER; @@ -115,22 +113,6 @@ public class CdsCrServiceR5 implements ICdsCrService { if (data.hasEntry()) { parameters.addParameter(part(APPLY_PARAMETER_DATA, data)); } - if (theJson.getFhirServer() != null) { - Endpoint endpoint = new Endpoint().setAddress(theJson.getFhirServer()); - if (theJson.getServiceRequestAuthorizationJson().getAccessToken() != null) { - String tokenType = getTokenType(theJson.getServiceRequestAuthorizationJson()); - endpoint.addHeader(String.format( - "Authorization: %s %s", - tokenType, theJson.getServiceRequestAuthorizationJson().getAccessToken())); - if (theJson.getServiceRequestAuthorizationJson().getSubject() != null) { - endpoint.addHeader(String.format( - "%s: %s", - myCdsConfigService.getCdsCrSettings().getClientIdHeaderName(), - theJson.getServiceRequestAuthorizationJson().getSubject())); - } - } - parameters.addParameter(part(APPLY_PARAMETER_DATA_ENDPOINT, endpoint)); - } return parameters; } diff --git a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementDstu3.java b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementDstu3.java index a0d6da80d03..c9b1fe19f3f 100644 --- a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementDstu3.java +++ b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementDstu3.java @@ -68,13 +68,13 @@ public class CrDiscoveryElementDstu3 implements ICrDiscoveryElement { || p.equals("Patient?_id=Patient/{{context.patientId}}"))) { String key = getKey(++itemNo); service.addPrefetch(key, "Patient?_id={{context.patientId}}"); - service.addSource(key, 
CdsResolutionStrategyEnum.SERVICE); + service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT); } for (String item : myPrefetchUrlList) { String key = getKey(++itemNo); service.addPrefetch(key, item); - service.addSource(key, CdsResolutionStrategyEnum.SERVICE); + service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT); } return service; diff --git a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementR4.java b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementR4.java index 630a05dc259..111e3844d9c 100644 --- a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementR4.java +++ b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementR4.java @@ -68,13 +68,13 @@ public class CrDiscoveryElementR4 implements ICrDiscoveryElement { || p.equals("Patient?_id=Patient/{{context.patientId}}"))) { String key = getKey(++itemNo); service.addPrefetch(key, "Patient?_id={{context.patientId}}"); - service.addSource(key, CdsResolutionStrategyEnum.NONE); + service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT); } for (String item : myPrefetchUrlList) { String key = getKey(++itemNo); service.addPrefetch(key, item); - service.addSource(key, CdsResolutionStrategyEnum.NONE); + service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT); } return service; diff --git a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementR5.java b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementR5.java index 879662c3131..105f56ec55a 100644 --- a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementR5.java +++ b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/discovery/CrDiscoveryElementR5.java @@ -68,13 +68,13 @@ public 
class CrDiscoveryElementR5 implements ICrDiscoveryElement { || p.equals("Patient?_id=Patient/{{context.patientId}}"))) { String key = getKey(++itemNo); service.addPrefetch(key, "Patient?_id={{context.patientId}}"); - service.addSource(key, CdsResolutionStrategyEnum.SERVICE); + service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT); } for (String item : myPrefetchUrlList) { String key = getKey(++itemNo); service.addPrefetch(key, item); - service.addSource(key, CdsResolutionStrategyEnum.SERVICE); + service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT); } return service; diff --git a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/prefetch/CdsPrefetchSvc.java b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/prefetch/CdsPrefetchSvc.java index da56e98b21a..ee4fb9855ef 100644 --- a/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/prefetch/CdsPrefetchSvc.java +++ b/hapi-fhir-server-cds-hooks/src/main/java/ca/uhn/hapi/fhir/cdshooks/svc/prefetch/CdsPrefetchSvc.java @@ -27,7 +27,6 @@ import ca.uhn.hapi.fhir.cdshooks.api.ICdsHooksDaoAuthorizationSvc; import ca.uhn.hapi.fhir.cdshooks.api.ICdsServiceMethod; import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceJson; import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceRequestJson; -import ca.uhn.hapi.fhir.cdshooks.svc.CdsCrServiceMethod; import org.hl7.fhir.instance.model.api.IBaseResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -57,11 +56,6 @@ public class CdsPrefetchSvc { public void augmentRequest(CdsServiceRequestJson theCdsServiceRequestJson, ICdsServiceMethod theServiceMethod) { CdsServiceJson serviceSpec = theServiceMethod.getCdsServiceJson(); - if (theServiceMethod instanceof CdsCrServiceMethod) { - // CdsCrServices will retrieve data from the dao or fhir server passed in as needed, - // checking for missing prefetch is not necessary. 
- return; - } Set missingPrefetch = findMissingPrefetch(serviceSpec, theCdsServiceRequestJson); if (missingPrefetch.isEmpty()) { return; diff --git a/hapi-fhir-server-cds-hooks/src/test/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/resolution/CdsCrServiceR4Test.java b/hapi-fhir-server-cds-hooks/src/test/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/resolution/CdsCrServiceR4Test.java index 6243ab8a8bb..a980c2975cc 100644 --- a/hapi-fhir-server-cds-hooks/src/test/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/resolution/CdsCrServiceR4Test.java +++ b/hapi-fhir-server-cds-hooks/src/test/java/ca/uhn/hapi/fhir/cdshooks/svc/cr/resolution/CdsCrServiceR4Test.java @@ -47,7 +47,7 @@ public class CdsCrServiceR4Test extends BaseCrTest { requestDetails.setId(planDefinitionId); final Parameters params = new CdsCrServiceR4(requestDetails, repository, myCdsConfigService).encodeParams(cdsServiceRequestJson); - assertTrue(params.getParameter().size() == 3); + assertTrue(params.getParameter().size() == 2); assertTrue(params.getParameter("parameters").hasResource()); } diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 2b06e200f87..366ceb47463 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml index bd1089b7b0a..cfaae68b9d9 100644 --- a/hapi-fhir-server-openapi/pom.xml +++ b/hapi-fhir-server-openapi/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index 25bb177e976..3d74ccc0870 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java index 8906c74af4a..d3386e5b9b1 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java @@ -1325,7 +1325,14 @@ public class RestfulServer extends HttpServlet implements IRestfulServer hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml index 38ffb260eaa..32680b37acb 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml @@ -21,7 +21,7 @@ ca.uhn.hapi.fhir hapi-fhir-caching-api - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml index 2c010d73a21..da34ff1770e 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml index 8e806a73771..3e09e7c0160 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml @@ -7,7 +7,7 @@ hapi-fhir ca.uhn.hapi.fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../../pom.xml diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml index 722bcfa2efe..3a7bdba7d54 100644 --- a/hapi-fhir-serviceloaders/pom.xml +++ 
b/hapi-fhir-serviceloaders/pom.xml @@ -5,7 +5,7 @@ hapi-deployable-pom ca.uhn.hapi.fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 924a9753c59..93b126309c1 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index ee838830f12..7a250b4b4c5 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index 1311703ec16..a99bea8806d 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml 
b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index bc4fa610eb4..e3b0cf47a83 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index 2c2fe2f31cb..f45a5e76eba 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index 4aedd388b8b..d554053161d 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index d9ede8644f7..8b56e169941 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml index f7a3ca491fc..0827041f3d5 100644 --- a/hapi-fhir-sql-migrate/pom.xml +++ b/hapi-fhir-sql-migrate/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java 
b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java index b8db102a469..75767ba6bf3 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java @@ -207,14 +207,7 @@ public class JdbcUtils { case Types.BLOB: return new ColumnType(ColumnTypeEnum.BLOB, length); case Types.LONGVARBINARY: - if (DriverTypeEnum.MYSQL_5_7.equals(theConnectionProperties.getDriverType())) { - // See git - return new ColumnType(ColumnTypeEnum.BLOB, length); - } else { - throw new IllegalArgumentException( - Msg.code(32) + "Don't know how to handle datatype " + dataType - + " for column " + theColumnName + " on table " + theTableName); - } + return new ColumnType(ColumnTypeEnum.BINARY, length); case Types.VARBINARY: if (DriverTypeEnum.MSSQL_2012.equals(theConnectionProperties.getDriverType())) { // MS SQLServer seems to be mapping BLOB to VARBINARY under the covers, so we need diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java index 1de4092beba..acebbb82383 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java @@ -37,5 +37,7 @@ public enum ColumnTypeEnum { * @Column(length=Integer.MAX_VALUE) */ TEXT, + /** Long inline binary */ + BINARY, BIG_DECIMAL; } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java index f483b74b330..7592d6c5d1a 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java +++ 
b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java @@ -131,6 +131,14 @@ public final class ColumnTypeToDriverTypeToSqlType { setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.POSTGRES_9_4, "text"); setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.MSSQL_2012, "varchar(MAX)"); + setColumnType(ColumnTypeEnum.BINARY, DriverTypeEnum.H2_EMBEDDED, "blob"); + setColumnType(ColumnTypeEnum.BINARY, DriverTypeEnum.DERBY_EMBEDDED, "blob"); + setColumnType(ColumnTypeEnum.BINARY, DriverTypeEnum.MARIADB_10_1, "longblob"); + setColumnType(ColumnTypeEnum.BINARY, DriverTypeEnum.MYSQL_5_7, "longblob"); + setColumnType(ColumnTypeEnum.BINARY, DriverTypeEnum.ORACLE_12C, "blob"); + setColumnType(ColumnTypeEnum.BINARY, DriverTypeEnum.POSTGRES_9_4, "bytea"); + setColumnType(ColumnTypeEnum.BINARY, DriverTypeEnum.MSSQL_2012, "varbinary(MAX)"); + setColumnType(ColumnTypeEnum.BIG_DECIMAL, DriverTypeEnum.H2_EMBEDDED, "numeric(38,2)"); setColumnType(ColumnTypeEnum.BIG_DECIMAL, DriverTypeEnum.DERBY_EMBEDDED, "decimal(31,2)"); setColumnType(ColumnTypeEnum.BIG_DECIMAL, DriverTypeEnum.MARIADB_10_1, "decimal(38,2)"); diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java index 284e24ac3c3..70dc3d2c0d1 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java @@ -133,9 +133,10 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask { } break; case ORACLE_12C: - String oracleNullableStmt = !alreadyCorrectNullable ? notNull : ""; - sql = "alter table " + getTableName() + " modify ( " + getColumnName() + " " + type + oracleNullableStmt - + " )"; + String oracleNullableStmt = alreadyCorrectNullable ? "" : notNull; + String oracleTypeStmt = alreadyOfCorrectType ? 
"" : type; + sql = "alter table " + getTableName() + " modify ( " + getColumnName() + " " + oracleTypeStmt + " " + + oracleNullableStmt + " )"; break; case MSSQL_2012: sql = "alter table " + getTableName() + " alter column " + getColumnName() + " " + type + notNull; diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml index 2fabd9ec3d0..bb38f3ccee7 100644 --- a/hapi-fhir-storage-batch2-jobs/pom.xml +++ b/hapi-fhir-storage-batch2-jobs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java index b44bb04f8c0..56de694bc49 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java @@ -36,6 +36,7 @@ import org.springframework.context.annotation.Scope; public class BulkExportAppCtx { public static final String WRITE_TO_BINARIES = "write-to-binaries"; + public static final String CREATE_REPORT_STEP = "create-report-step"; @Bean public JobDefinition bulkExportJobDefinition() { @@ -65,7 +66,7 @@ public class BulkExportAppCtx { writeBinaryStep()) // finalize the job (set to complete) .addFinalReducerStep( - "create-report-step", + CREATE_REPORT_STEP, "Creates the output report from a bulk export job", BulkExportJobResults.class, createReportStep()) @@ -119,16 +120,25 @@ public class BulkExportAppCtx { return new FetchResourceIdsStep(); } + /** + * Note, this bean is only used for version 1 of the bulk export job definition + */ @Bean public ExpandResourcesStep expandResourcesStep() { return new ExpandResourcesStep(); } + /** + * Note, this bean is only used for version 1 of the bulk export job definition + */ 
@Bean public WriteBinaryStep writeBinaryStep() { return new WriteBinaryStep(); } + /** + * Note, this bean is only used for version 2 of the bulk export job definition + */ @Bean public ExpandResourceAndWriteBinaryStep expandResourceAndWriteBinaryStep() { return new ExpandResourceAndWriteBinaryStep(); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStep.java index 6f43cc67967..6200ffdd8c0 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStep.java @@ -34,6 +34,7 @@ import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.executor.InterceptorService; import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; @@ -41,7 +42,6 @@ import ca.uhn.fhir.jpa.api.model.PersistentIdToForcedIdMap; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; -import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult; @@ -75,11 +75,13 @@ import org.springframework.context.ApplicationContext; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; -import java.util.ArrayList; +import java.util.HashMap; import java.util.Iterator; import java.util.List; +import 
java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.function.Consumer; import java.util.stream.Collectors; import static ca.uhn.fhir.rest.api.Constants.PARAM_ID; @@ -103,7 +105,7 @@ public class ExpandResourceAndWriteBinaryStep private IBulkExportProcessor myBulkExportProcessor; @Autowired - private StorageSettings myStorageSettings; + private JpaStorageSettings myStorageSettings; @Autowired private ApplicationContext myApplicationContext; @@ -119,6 +121,23 @@ public class ExpandResourceAndWriteBinaryStep private volatile ResponseTerminologyTranslationSvc myResponseTerminologyTranslationSvc; + /** + * Note on the design of this step: + * This step takes a list of resource PIDs as input, fetches those + * resources, applies a bunch of filtering/consent/MDM/etc. modifications + * on them, serializes the result as NDJSON files, and then persists those + * NDJSON files as Binary resources. + *

+ * We want to avoid writing files which exceed the configured maximum + * file size, and we also want to avoid keeping too much in memory + * at any given time, so this class works a bit like a stream processor + * (although not using Java streams). + *

+ * The {@link #fetchResourcesByIdAndConsumeThem(ResourceIdList, RequestPartitionId, Consumer)} + * method loads the resources by ID, {@link ExpandResourcesConsumer} handles + * the filtering and whatnot, then the {@link NdJsonResourceWriter} + * ultimately writes them. + */ @Nonnull @Override public RunOutcome run( @@ -126,235 +145,36 @@ public class ExpandResourceAndWriteBinaryStep @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { - List expandedResourcesList = expandResourcesFromList(theStepExecutionDetails); - int numResourcesProcessed = 0; - ourLog.info("Write binary step of Job Export"); + // Currently only NDJSON output format is supported, but we could add other + // kinds of writers here for other formats if needed + NdJsonResourceWriter resourceWriter = new NdJsonResourceWriter(theStepExecutionDetails, theDataSink); - // write to binary each resource type separately, without chunking, we need to do this in a loop now - for (ExpandedResourcesList expandedResources : expandedResourcesList) { + expandResourcesFromList(theStepExecutionDetails, resourceWriter); - numResourcesProcessed += expandedResources.getStringifiedResources().size(); - - ourLog.info("Writing {} resources to binary file", numResourcesProcessed); - - @SuppressWarnings("unchecked") - IFhirResourceDao binaryDao = myDaoRegistry.getResourceDao("Binary"); - - IBaseBinary binary = BinaryUtil.newBinary(myFhirContext); - - addMetadataExtensionsToBinary(theStepExecutionDetails, expandedResources, binary); - - // TODO - // should be dependent on the output format in parameters but for now, only NDJSON is supported - binary.setContentType(Constants.CT_FHIR_NDJSON); - - int processedRecordsCount = 0; - try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { - try (OutputStreamWriter streamWriter = getStreamWriter(outputStream)) { - for (String stringified : expandedResources.getStringifiedResources()) { - streamWriter.append(stringified); - 
streamWriter.append("\n"); - processedRecordsCount++; - } - streamWriter.flush(); - outputStream.flush(); - } - binary.setContent(outputStream.toByteArray()); - } catch (IOException ex) { - String errorMsg = String.format( - "Failure to process resource of type %s : %s", - expandedResources.getResourceType(), ex.getMessage()); - ourLog.error(errorMsg); - - throw new JobExecutionFailedException(Msg.code(2431) + errorMsg); - } - - SystemRequestDetails srd = new SystemRequestDetails(); - BulkExportJobParameters jobParameters = theStepExecutionDetails.getParameters(); - RequestPartitionId partitionId = jobParameters.getPartitionId(); - if (partitionId == null) { - srd.setRequestPartitionId(RequestPartitionId.defaultPartition()); - } else { - srd.setRequestPartitionId(partitionId); - } - - // Pick a unique ID and retry until we get one that isn't already used. This is just to - // avoid any possibility of people guessing the IDs of these Binaries and fishing for them. - while (true) { - // Use a random ID to make it harder to guess IDs - 32 characters of a-zA-Z0-9 - // has 190 bts of entropy according to https://www.omnicalculator.com/other/password-entropy - String proposedId = RandomTextUtils.newSecureRandomAlphaNumericString(32); - binary.setId(proposedId); - - // Make sure we don't accidentally reuse an ID. This should be impossible given the - // amount of entropy in the IDs but might as well be sure. 
- try { - IBaseBinary output = binaryDao.read(binary.getIdElement(), new SystemRequestDetails(), true); - if (output != null) { - continue; - } - } catch (ResourceNotFoundException e) { - // good - } - - break; - } - - if (myFhirContext.getVersion().getVersion().isNewerThan(FhirVersionEnum.DSTU2)) { - if (isNotBlank(jobParameters.getBinarySecurityContextIdentifierSystem()) - || isNotBlank(jobParameters.getBinarySecurityContextIdentifierValue())) { - FhirTerser terser = myFhirContext.newTerser(); - terser.setElement( - binary, - "securityContext.identifier.system", - jobParameters.getBinarySecurityContextIdentifierSystem()); - terser.setElement( - binary, - "securityContext.identifier.value", - jobParameters.getBinarySecurityContextIdentifierValue()); - } - } - - DaoMethodOutcome outcome = binaryDao.update(binary, srd); - IIdType id = outcome.getId(); - - BulkExportBinaryFileId bulkExportBinaryFileId = new BulkExportBinaryFileId(); - bulkExportBinaryFileId.setBinaryId(id.getValueAsString()); - bulkExportBinaryFileId.setResourceType(expandedResources.getResourceType()); - theDataSink.accept(bulkExportBinaryFileId); - - ourLog.info( - "Binary writing complete for {} resources of type {}.", - processedRecordsCount, - expandedResources.getResourceType()); - } - return new RunOutcome(numResourcesProcessed); + return new RunOutcome(resourceWriter.getNumResourcesProcessed()); } - private List expandResourcesFromList( - StepExecutionDetails theStepExecutionDetails) { - List expandedResourcesList = new ArrayList<>(); - String instanceId = theStepExecutionDetails.getInstance().getInstanceId(); - String chunkId = theStepExecutionDetails.getChunkId(); + private void expandResourcesFromList( + StepExecutionDetails theStepExecutionDetails, + Consumer theResourceWriter) { + ResourceIdList idList = theStepExecutionDetails.getData(); BulkExportJobParameters parameters = theStepExecutionDetails.getParameters(); - ourLog.info( - "Bulk export instance[{}] chunk[{}] - About to expand {} 
resource IDs into their full resource bodies.", - instanceId, - chunkId, - idList.getIds().size()); + Consumer> resourceListConsumer = + new ExpandResourcesConsumer(theStepExecutionDetails, theResourceWriter); // search the resources - List allResources = fetchAllResources(idList, parameters.getPartitionId()); - - // Apply post-fetch filtering - String resourceType = idList.getResourceType(); - List postFetchFilterUrls = parameters.getPostFetchFilterUrls().stream() - .filter(t -> t.substring(0, t.indexOf('?')).equals(resourceType)) - .collect(Collectors.toList()); - - if (!postFetchFilterUrls.isEmpty()) { - applyPostFetchFiltering(allResources, postFetchFilterUrls, instanceId, chunkId); - } - - // if necessary, expand resources - if (parameters.isExpandMdm()) { - myBulkExportProcessor.expandMdmResources(allResources); - } - - // Normalize terminology - if (myStorageSettings.isNormalizeTerminologyForBulkExportJobs()) { - ResponseTerminologyTranslationSvc terminologyTranslationSvc = myResponseTerminologyTranslationSvc; - if (terminologyTranslationSvc == null) { - terminologyTranslationSvc = myApplicationContext.getBean(ResponseTerminologyTranslationSvc.class); - myResponseTerminologyTranslationSvc = terminologyTranslationSvc; - } - terminologyTranslationSvc.processResourcesForTerminologyTranslation(allResources); - } - - // Interceptor call - if (myInterceptorService.hasHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)) { - for (Iterator iter = allResources.iterator(); iter.hasNext(); ) { - HookParams params = new HookParams() - .add(BulkExportJobParameters.class, theStepExecutionDetails.getParameters()) - .add(IBaseResource.class, iter.next()); - boolean outcome = - myInterceptorService.callHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION, params); - if (!outcome) { - iter.remove(); - } - } - } - - // encode them - Key is resource type, Value is a collection of serialized resources of that type - ListMultimap resources = encodeToString(allResources, 
parameters); - - for (String nextResourceType : resources.keySet()) { - - ExpandedResourcesList output = new ExpandedResourcesList(); - output.setStringifiedResources(resources.get(nextResourceType)); - output.setResourceType(nextResourceType); - expandedResourcesList.add(output); - - ourLog.info( - "Expanding of {} resources of type {} completed", - idList.getIds().size(), - idList.getResourceType()); - } - return expandedResourcesList; + fetchResourcesByIdAndConsumeThem(idList, parameters.getPartitionId(), resourceListConsumer); } - private void applyPostFetchFiltering( - List theResources, - List thePostFetchFilterUrls, - String theInstanceId, - String theChunkId) { - int numRemoved = 0; - for (Iterator iter = theResources.iterator(); iter.hasNext(); ) { - boolean matched = applyPostFetchFilteringForSingleResource(thePostFetchFilterUrls, iter); - - if (!matched) { - iter.remove(); - numRemoved++; - } - } - - if (numRemoved > 0) { - ourLog.info( - "Bulk export instance[{}] chunk[{}] - {} resources were filtered out because of post-fetch filter URLs", - theInstanceId, - theChunkId, - numRemoved); - } - } - - private boolean applyPostFetchFilteringForSingleResource( - List thePostFetchFilterUrls, Iterator iter) { - IBaseResource nextResource = iter.next(); - String nextResourceType = myFhirContext.getResourceType(nextResource); - - for (String nextPostFetchFilterUrl : thePostFetchFilterUrls) { - if (nextPostFetchFilterUrl.contains("?")) { - String resourceType = nextPostFetchFilterUrl.substring(0, nextPostFetchFilterUrl.indexOf('?')); - if (nextResourceType.equals(resourceType)) { - InMemoryMatchResult matchResult = myInMemoryResourceMatcher.match( - nextPostFetchFilterUrl, nextResource, null, new SystemRequestDetails()); - if (matchResult.matched()) { - return true; - } - } - } - } - return false; - } - - private List fetchAllResources(ResourceIdList theIds, RequestPartitionId theRequestPartitionId) { + private void fetchResourcesByIdAndConsumeThem( + 
ResourceIdList theIds, + RequestPartitionId theRequestPartitionId, + Consumer> theResourceListConsumer) { ArrayListMultimap typeToIds = ArrayListMultimap.create(); theIds.getIds().forEach(t -> typeToIds.put(t.getResourceType(), t.getId())); - List resources = new ArrayList<>(theIds.getIds().size()); - for (String resourceType : typeToIds.keySet()) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType); @@ -383,31 +203,9 @@ public class ExpandResourceAndWriteBinaryStep SearchParameterMap spMap = SearchParameterMap.newSynchronous().add(PARAM_ID, idListParam); IBundleProvider outcome = dao.search(spMap, new SystemRequestDetails().setRequestPartitionId(theRequestPartitionId)); - resources.addAll(outcome.getAllResources()); + theResourceListConsumer.accept(outcome.getAllResources()); } } - - return resources; - } - - private ListMultimap encodeToString( - List theResources, BulkExportJobParameters theParameters) { - IParser parser = getParser(theParameters); - - ListMultimap retVal = ArrayListMultimap.create(); - for (IBaseResource resource : theResources) { - String type = myFhirContext.getResourceType(resource); - String jsonResource = parser.encodeResourceToString(resource); - retVal.put(type, jsonResource); - } - return retVal; - } - - private IParser getParser(BulkExportJobParameters theParameters) { - // The parser depends on the output format - // but for now, only ndjson is supported - // see WriteBinaryStep as well - return myFhirContext.newJsonParser().setPrettyPrint(false); } /** @@ -462,4 +260,310 @@ public class ExpandResourceAndWriteBinaryStep public void setIdHelperServiceForUnitTest(IIdHelperService theIdHelperService) { myIdHelperService = theIdHelperService; } + + /** + * This class takes a collection of lists of resources read from the + * repository, and processes them, then converts them into + * {@link ExpandedResourcesList} instances, each one of which corresponds + * to a single output file. 
We try to avoid exceeding the maximum file + * size defined in + * {@link JpaStorageSettings#getBulkExportFileMaximumSize()} + * so we will do our best to emit multiple lists in favour of emitting + * a list that exceeds that threshold. + */ + private class ExpandResourcesConsumer implements Consumer> { + + private final Consumer myResourceWriter; + private final StepExecutionDetails myStepExecutionDetails; + + public ExpandResourcesConsumer( + StepExecutionDetails theStepExecutionDetails, + Consumer theResourceWriter) { + myStepExecutionDetails = theStepExecutionDetails; + myResourceWriter = theResourceWriter; + } + + @Override + public void accept(List theResources) throws JobExecutionFailedException { + String instanceId = myStepExecutionDetails.getInstance().getInstanceId(); + String chunkId = myStepExecutionDetails.getChunkId(); + ResourceIdList idList = myStepExecutionDetails.getData(); + BulkExportJobParameters parameters = myStepExecutionDetails.getParameters(); + + ourLog.info( + "Bulk export instance[{}] chunk[{}] - About to expand {} resource IDs into their full resource bodies.", + instanceId, + chunkId, + idList.getIds().size()); + + // Apply post-fetch filtering + String resourceType = idList.getResourceType(); + List postFetchFilterUrls = parameters.getPostFetchFilterUrls().stream() + .filter(t -> t.substring(0, t.indexOf('?')).equals(resourceType)) + .collect(Collectors.toList()); + + if (!postFetchFilterUrls.isEmpty()) { + applyPostFetchFiltering(theResources, postFetchFilterUrls, instanceId, chunkId); + } + + // if necessary, expand resources + if (parameters.isExpandMdm()) { + myBulkExportProcessor.expandMdmResources(theResources); + } + + // Normalize terminology + if (myStorageSettings.isNormalizeTerminologyForBulkExportJobs()) { + ResponseTerminologyTranslationSvc terminologyTranslationSvc = myResponseTerminologyTranslationSvc; + if (terminologyTranslationSvc == null) { + terminologyTranslationSvc = 
myApplicationContext.getBean(ResponseTerminologyTranslationSvc.class); + myResponseTerminologyTranslationSvc = terminologyTranslationSvc; + } + terminologyTranslationSvc.processResourcesForTerminologyTranslation(theResources); + } + + // Interceptor call + if (myInterceptorService.hasHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)) { + for (Iterator iter = theResources.iterator(); iter.hasNext(); ) { + HookParams params = new HookParams() + .add(BulkExportJobParameters.class, myStepExecutionDetails.getParameters()) + .add(IBaseResource.class, iter.next()); + boolean outcome = + myInterceptorService.callHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION, params); + if (!outcome) { + iter.remove(); + } + } + } + + // encode them - Key is resource type, Value is a collection of serialized resources of that type + IParser parser = getParser(parameters); + + ListMultimap resourceTypeToStringifiedResources = ArrayListMultimap.create(); + Map resourceTypeToTotalSize = new HashMap<>(); + for (IBaseResource resource : theResources) { + String type = myFhirContext.getResourceType(resource); + int existingSize = resourceTypeToTotalSize.getOrDefault(type, 0); + + String jsonResource = parser.encodeResourceToString(resource); + int newSize = existingSize + jsonResource.length(); + + // If adding another stringified resource to the list for the given type + // would exceed the configured maximum allowed, then let's send the current + // list and flush it. Note that if a single resource exceeds the configurable + // maximum then we have no choice but to send it + long bulkExportFileMaximumSize = myStorageSettings.getBulkExportFileMaximumSize(); + if (newSize > bulkExportFileMaximumSize) { + if (existingSize == 0) { + // If no files are already in the collection, then this one file + // is bigger than the maximum allowable. 
We'll allow it in that + // case + ourLog.warn( + "Single resource size {} exceeds allowable maximum of {}, so will ignore maximum", + newSize, + bulkExportFileMaximumSize); + } else { + // Otherwise, flush the contents now before adding the next file + List stringifiedResources = resourceTypeToStringifiedResources.get(type); + writeStringifiedResources(type, stringifiedResources); + + resourceTypeToStringifiedResources.removeAll(type); + newSize = jsonResource.length(); + } + } + + resourceTypeToStringifiedResources.put(type, jsonResource); + resourceTypeToTotalSize.put(type, newSize); + } + + for (String nextResourceType : resourceTypeToStringifiedResources.keySet()) { + List stringifiedResources = resourceTypeToStringifiedResources.get(nextResourceType); + writeStringifiedResources(nextResourceType, stringifiedResources); + } + } + + private void writeStringifiedResources(String theResourceType, List theStringifiedResources) { + if (!theStringifiedResources.isEmpty()) { + + ExpandedResourcesList output = new ExpandedResourcesList(); + output.setStringifiedResources(theStringifiedResources); + output.setResourceType(theResourceType); + myResourceWriter.accept(output); + + ourLog.info( + "Expanding of {} resources of type {} completed", + theStringifiedResources.size(), + theResourceType); + } + } + + private void applyPostFetchFiltering( + List theResources, + List thePostFetchFilterUrls, + String theInstanceId, + String theChunkId) { + int numRemoved = 0; + for (Iterator iter = theResources.iterator(); iter.hasNext(); ) { + boolean matched = applyPostFetchFilteringForSingleResource(thePostFetchFilterUrls, iter); + + if (!matched) { + iter.remove(); + numRemoved++; + } + } + + if (numRemoved > 0) { + ourLog.info( + "Bulk export instance[{}] chunk[{}] - {} resources were filtered out because of post-fetch filter URLs", + theInstanceId, + theChunkId, + numRemoved); + } + } + + private boolean applyPostFetchFilteringForSingleResource( + List thePostFetchFilterUrls, 
Iterator iter) { + IBaseResource nextResource = iter.next(); + String nextResourceType = myFhirContext.getResourceType(nextResource); + + for (String nextPostFetchFilterUrl : thePostFetchFilterUrls) { + if (nextPostFetchFilterUrl.contains("?")) { + String resourceType = nextPostFetchFilterUrl.substring(0, nextPostFetchFilterUrl.indexOf('?')); + if (nextResourceType.equals(resourceType)) { + InMemoryMatchResult matchResult = myInMemoryResourceMatcher.match( + nextPostFetchFilterUrl, nextResource, null, new SystemRequestDetails()); + if (matchResult.matched()) { + return true; + } + } + } + } + return false; + } + + private IParser getParser(BulkExportJobParameters theParameters) { + // The parser depends on the output format + // but for now, only ndjson is supported + // see WriteBinaryStep as well + return myFhirContext.newJsonParser().setPrettyPrint(false); + } + } + + /** + * This class takes a collection of expanded resources, and expands it to + * an NDJSON file, which is written to a Binary resource. 
+ */ + private class NdJsonResourceWriter implements Consumer { + + private final StepExecutionDetails myStepExecutionDetails; + private final IJobDataSink myDataSink; + private int myNumResourcesProcessed = 0; + + public NdJsonResourceWriter( + StepExecutionDetails theStepExecutionDetails, + IJobDataSink theDataSink) { + this.myStepExecutionDetails = theStepExecutionDetails; + this.myDataSink = theDataSink; + } + + public int getNumResourcesProcessed() { + return myNumResourcesProcessed; + } + + @Override + public void accept(ExpandedResourcesList theExpandedResourcesList) throws JobExecutionFailedException { + int batchSize = theExpandedResourcesList.getStringifiedResources().size(); + ourLog.info("Writing {} resources to binary file", batchSize); + + myNumResourcesProcessed += batchSize; + + @SuppressWarnings("unchecked") + IFhirResourceDao binaryDao = myDaoRegistry.getResourceDao("Binary"); + + IBaseBinary binary = BinaryUtil.newBinary(myFhirContext); + + addMetadataExtensionsToBinary(myStepExecutionDetails, theExpandedResourcesList, binary); + + binary.setContentType(Constants.CT_FHIR_NDJSON); + + int processedRecordsCount = 0; + try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { + try (OutputStreamWriter streamWriter = getStreamWriter(outputStream)) { + for (String stringified : theExpandedResourcesList.getStringifiedResources()) { + streamWriter.append(stringified); + streamWriter.append("\n"); + processedRecordsCount++; + } + streamWriter.flush(); + outputStream.flush(); + } + binary.setContent(outputStream.toByteArray()); + } catch (IOException ex) { + String errorMsg = String.format( + "Failure to process resource of type %s : %s", + theExpandedResourcesList.getResourceType(), ex.getMessage()); + ourLog.error(errorMsg); + + throw new JobExecutionFailedException(Msg.code(2431) + errorMsg); + } + + SystemRequestDetails srd = new SystemRequestDetails(); + BulkExportJobParameters jobParameters = myStepExecutionDetails.getParameters(); + 
RequestPartitionId partitionId = jobParameters.getPartitionId(); + if (partitionId == null) { + srd.setRequestPartitionId(RequestPartitionId.defaultPartition()); + } else { + srd.setRequestPartitionId(partitionId); + } + + // Pick a unique ID and retry until we get one that isn't already used. This is just to + // avoid any possibility of people guessing the IDs of these Binaries and fishing for them. + while (true) { + // Use a random ID to make it harder to guess IDs - 32 characters of a-zA-Z0-9 + // has 190 bits of entropy according to https://www.omnicalculator.com/other/password-entropy + String proposedId = RandomTextUtils.newSecureRandomAlphaNumericString(32); + binary.setId(proposedId); + + // Make sure we don't accidentally reuse an ID. This should be impossible given the + // amount of entropy in the IDs but might as well be sure. + try { + IBaseBinary output = binaryDao.read(binary.getIdElement(), new SystemRequestDetails(), true); + if (output != null) { + continue; + } + } catch (ResourceNotFoundException e) { + // good + } + + break; + } + + if (myFhirContext.getVersion().getVersion().isNewerThan(FhirVersionEnum.DSTU2)) { + if (isNotBlank(jobParameters.getBinarySecurityContextIdentifierSystem()) + || isNotBlank(jobParameters.getBinarySecurityContextIdentifierValue())) { + FhirTerser terser = myFhirContext.newTerser(); + terser.setElement( + binary, + "securityContext.identifier.system", + jobParameters.getBinarySecurityContextIdentifierSystem()); + terser.setElement( + binary, + "securityContext.identifier.value", + jobParameters.getBinarySecurityContextIdentifierValue()); + } + } + + DaoMethodOutcome outcome = binaryDao.update(binary, srd); + IIdType id = outcome.getId(); + + BulkExportBinaryFileId bulkExportBinaryFileId = new BulkExportBinaryFileId(); + bulkExportBinaryFileId.setBinaryId(id.getValueAsString()); + bulkExportBinaryFileId.setResourceType(theExpandedResourcesList.getResourceType()); + myDataSink.accept(bulkExportBinaryFileId); + + 
ourLog.info( + "Binary writing complete for {} resources of type {}.", + processedRecordsCount, + theExpandedResourcesList.getResourceType()); + } + } } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java index e2cce9c5f31..8c8f0490d9e 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java @@ -26,18 +26,19 @@ import ca.uhn.fhir.batch2.api.RunOutcome; import ca.uhn.fhir.batch2.api.StepExecutionDetails; import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList; import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList; +import ca.uhn.fhir.batch2.jobs.models.BatchResourceId; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.executor.InterceptorService; import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.PersistentIdToForcedIdMap; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; -import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher; @@ -52,6 +53,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ListMultimap; import jakarta.annotation.Nonnull; +import 
org.apache.commons.collections4.ListUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; @@ -84,7 +86,7 @@ public class ExpandResourcesStep private ApplicationContext myApplicationContext; @Autowired - private StorageSettings myStorageSettings; + private JpaStorageSettings myStorageSettings; @Autowired private IIdHelperService myIdHelperService; @@ -108,72 +110,99 @@ public class ExpandResourcesStep throws JobExecutionFailedException { String instanceId = theStepExecutionDetails.getInstance().getInstanceId(); String chunkId = theStepExecutionDetails.getChunkId(); - ResourceIdList idList = theStepExecutionDetails.getData(); + ResourceIdList data = theStepExecutionDetails.getData(); BulkExportJobParameters parameters = theStepExecutionDetails.getParameters(); ourLog.info( "Bulk export instance[{}] chunk[{}] - About to expand {} resource IDs into their full resource bodies.", instanceId, chunkId, - idList.getIds().size()); + data.getIds().size()); - // search the resources - List allResources = fetchAllResources(idList, parameters.getPartitionId()); + // Partition the ID list in order to only fetch a reasonable number at a time + List> idLists = ListUtils.partition(data.getIds(), 100); - // Apply post-fetch filtering - String resourceType = idList.getResourceType(); - List postFetchFilterUrls = parameters.getPostFetchFilterUrls().stream() - .filter(t -> t.substring(0, t.indexOf('?')).equals(resourceType)) - .collect(Collectors.toList()); + for (List idList : idLists) { - if (!postFetchFilterUrls.isEmpty()) { - applyPostFetchFiltering(allResources, postFetchFilterUrls, instanceId, chunkId); - } + // search the resources + List allResources = fetchAllResources(idList, parameters.getPartitionId()); - // if necessary, expand resources - if (parameters.isExpandMdm()) { - myBulkExportProcessor.expandMdmResources(allResources); - } + // Apply post-fetch filtering + String 
resourceType = data.getResourceType(); + List postFetchFilterUrls = parameters.getPostFetchFilterUrls().stream() + .filter(t -> t.substring(0, t.indexOf('?')).equals(resourceType)) + .collect(Collectors.toList()); - // Normalize terminology - if (myStorageSettings.isNormalizeTerminologyForBulkExportJobs()) { - ResponseTerminologyTranslationSvc terminologyTranslationSvc = myResponseTerminologyTranslationSvc; - if (terminologyTranslationSvc == null) { - terminologyTranslationSvc = myApplicationContext.getBean(ResponseTerminologyTranslationSvc.class); - myResponseTerminologyTranslationSvc = terminologyTranslationSvc; + if (!postFetchFilterUrls.isEmpty()) { + applyPostFetchFiltering(allResources, postFetchFilterUrls, instanceId, chunkId); } - terminologyTranslationSvc.processResourcesForTerminologyTranslation(allResources); - } - // Interceptor call - if (myInterceptorService.hasHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)) { - for (Iterator iter = allResources.iterator(); iter.hasNext(); ) { - HookParams params = new HookParams() - .add(BulkExportJobParameters.class, theStepExecutionDetails.getParameters()) - .add(IBaseResource.class, iter.next()); - boolean outcome = - myInterceptorService.callHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION, params); - if (!outcome) { - iter.remove(); + // if necessary, expand resources + if (parameters.isExpandMdm()) { + myBulkExportProcessor.expandMdmResources(allResources); + } + + // Normalize terminology + if (myStorageSettings.isNormalizeTerminologyForBulkExportJobs()) { + ResponseTerminologyTranslationSvc terminologyTranslationSvc = myResponseTerminologyTranslationSvc; + if (terminologyTranslationSvc == null) { + terminologyTranslationSvc = myApplicationContext.getBean(ResponseTerminologyTranslationSvc.class); + myResponseTerminologyTranslationSvc = terminologyTranslationSvc; + } + terminologyTranslationSvc.processResourcesForTerminologyTranslation(allResources); + } + + // Interceptor call + if 
(myInterceptorService.hasHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)) { + for (Iterator iter = allResources.iterator(); iter.hasNext(); ) { + HookParams params = new HookParams() + .add(BulkExportJobParameters.class, theStepExecutionDetails.getParameters()) + .add(IBaseResource.class, iter.next()); + boolean outcome = + myInterceptorService.callHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION, params); + if (!outcome) { + iter.remove(); + } } } - } - // encode them - Key is resource type, Value is a collection of serialized resources of that type - ListMultimap resources = encodeToString(allResources, parameters); + // encode them - Key is resource type, Value is a collection of serialized resources of that type + ListMultimap resources = encodeToString(allResources, parameters); - // set to datasink - for (String nextResourceType : resources.keySet()) { + // send to datasink + long maxFileSize = myStorageSettings.getBulkExportFileMaximumSize(); + long currentFileSize = 0; + for (String nextResourceType : resources.keySet()) { - ExpandedResourcesList output = new ExpandedResourcesList(); - output.setStringifiedResources(resources.get(nextResourceType)); - output.setResourceType(nextResourceType); - theDataSink.accept(output); + List stringifiedResources = resources.get(nextResourceType); + List currentFileStringifiedResources = new ArrayList<>(); - ourLog.info( - "Expanding of {} resources of type {} completed", - idList.getIds().size(), - idList.getResourceType()); + for (String nextStringifiedResource : stringifiedResources) { + + if (currentFileSize + nextStringifiedResource.length() > maxFileSize + && !currentFileStringifiedResources.isEmpty()) { + ExpandedResourcesList output = new ExpandedResourcesList(); + output.setStringifiedResources(currentFileStringifiedResources); + output.setResourceType(nextResourceType); + theDataSink.accept(output); + + currentFileStringifiedResources = new ArrayList<>(); + currentFileSize = 0; + } + + 
currentFileStringifiedResources.add(nextStringifiedResource); + currentFileSize += nextStringifiedResource.length(); + } + + if (!currentFileStringifiedResources.isEmpty()) { + ExpandedResourcesList output = new ExpandedResourcesList(); + output.setStringifiedResources(currentFileStringifiedResources); + output.setResourceType(nextResourceType); + theDataSink.accept(output); + } + + ourLog.info("Expanding of {} resources of type {} completed", idList.size(), data.getResourceType()); + } } // and return @@ -224,42 +253,36 @@ public class ExpandResourcesStep return false; } - private List fetchAllResources(ResourceIdList theIds, RequestPartitionId theRequestPartitionId) { + private List fetchAllResources( + List theIds, RequestPartitionId theRequestPartitionId) { ArrayListMultimap typeToIds = ArrayListMultimap.create(); - theIds.getIds().forEach(t -> typeToIds.put(t.getResourceType(), t.getId())); + theIds.forEach(t -> typeToIds.put(t.getResourceType(), t.getId())); - List resources = new ArrayList<>(theIds.getIds().size()); + List resources = new ArrayList<>(theIds.size()); for (String resourceType : typeToIds.keySet()) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType); List allIds = typeToIds.get(resourceType); - while (!allIds.isEmpty()) { - // Load in batches in order to avoid having too many PIDs go into a - // single SQ statement at once - int batchSize = Math.min(500, allIds.size()); + Set nextBatchOfPids = allIds.stream() + .map(t -> myIdHelperService.newPidFromStringIdAndResourceName(t, resourceType)) + .collect(Collectors.toSet()); - Set nextBatchOfPids = allIds.subList(0, batchSize).stream() - .map(t -> myIdHelperService.newPidFromStringIdAndResourceName(t, resourceType)) - .collect(Collectors.toSet()); - allIds = allIds.subList(batchSize, allIds.size()); + PersistentIdToForcedIdMap nextBatchOfResourceIds = myTransactionService + .withRequest(null) + .execute(() -> myIdHelperService.translatePidsToForcedIds(nextBatchOfPids)); - 
PersistentIdToForcedIdMap nextBatchOfResourceIds = myTransactionService - .withRequest(null) - .execute(() -> myIdHelperService.translatePidsToForcedIds(nextBatchOfPids)); - - TokenOrListParam idListParam = new TokenOrListParam(); - for (IResourcePersistentId nextPid : nextBatchOfPids) { - Optional resourceId = nextBatchOfResourceIds.get(nextPid); - idListParam.add(resourceId.orElse(nextPid.getId().toString())); - } - - SearchParameterMap spMap = SearchParameterMap.newSynchronous().add(PARAM_ID, idListParam); - IBundleProvider outcome = - dao.search(spMap, new SystemRequestDetails().setRequestPartitionId(theRequestPartitionId)); - resources.addAll(outcome.getAllResources()); + TokenOrListParam idListParam = new TokenOrListParam(); + for (IResourcePersistentId nextPid : nextBatchOfPids) { + Optional resourceId = nextBatchOfResourceIds.get(nextPid); + idListParam.add(resourceId.orElse(nextPid.getId().toString())); } + + SearchParameterMap spMap = SearchParameterMap.newSynchronous().add(PARAM_ID, idListParam); + IBundleProvider outcome = + dao.search(spMap, new SystemRequestDetails().setRequestPartitionId(theRequestPartitionId)); + resources.addAll(outcome.getAllResources()); } return resources; diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java index 16ec8987a1d..e576f9362b4 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java @@ -102,6 +102,8 @@ public class FetchResourceIdsStep implements IFirstJobStepWorker idsToSubmit = new ArrayList<>(); + int estimatedChunkSize = 0; + if (!pidIterator.hasNext()) { ourLog.debug("Bulk Export generated an iterator with no results!"); } @@ -121,17 +123,25 @@ public class FetchResourceIdsStep implements 
IFirstJobStepWorker 0) { + // Account for comma between array entries + estimatedChunkSize++; + } + estimatedChunkSize += batchResourceId.estimateSerializedSize(); + // Make sure resources stored in each batch does not go over the max capacity - if (idsToSubmit.size() >= myStorageSettings.getBulkExportFileMaximumCapacity()) { - submitWorkChunk(idsToSubmit, resourceType, params, theDataSink); + if (idsToSubmit.size() >= myStorageSettings.getBulkExportFileMaximumCapacity() + || estimatedChunkSize >= myStorageSettings.getBulkExportFileMaximumSize()) { + submitWorkChunk(idsToSubmit, resourceType, theDataSink); submissionCount++; idsToSubmit = new ArrayList<>(); + estimatedChunkSize = 0; } } // if we have any other Ids left, submit them now if (!idsToSubmit.isEmpty()) { - submitWorkChunk(idsToSubmit, resourceType, params, theDataSink); + submitWorkChunk(idsToSubmit, resourceType, theDataSink); submissionCount++; } } @@ -150,7 +160,6 @@ public class FetchResourceIdsStep implements IFirstJobStepWorker theBatchResourceIds, String theResourceType, - BulkExportJobParameters theParams, IJobDataSink theDataSink) { ResourceIdList idList = new ResourceIdList(); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceId.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceId.java index 61c3a5daf64..4070d4d1a20 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceId.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceId.java @@ -22,10 +22,13 @@ package ca.uhn.fhir.batch2.jobs.models; import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import com.fasterxml.jackson.annotation.JsonProperty; +import jakarta.annotation.Nonnull; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; -public class 
BatchResourceId implements IModelJson { +import static org.apache.commons.lang3.StringUtils.defaultString; + +public class BatchResourceId implements IModelJson, Comparable { @JsonProperty("type") private String myResourceType; @@ -77,6 +80,24 @@ public class BatchResourceId implements IModelJson { return new HashCodeBuilder(17, 37).append(myResourceType).append(myId).toHashCode(); } + /** + * Returns an estimate of how long the JSON serialized (non-pretty printed) form + * of this object will be. + */ + public int estimateSerializedSize() { + // 19 chars: {"id":"","type":""} + return 19 + defaultString(myId).length() + defaultString(myResourceType).length(); + } + + @Override + public int compareTo(@Nonnull BatchResourceId o) { + int retVal = o.myResourceType.compareTo(myResourceType); + if (retVal == 0) { + retVal = o.myId.compareTo(myId); + } + return retVal; + } + public static BatchResourceId getIdFromPID(IResourcePersistentId thePID, String theResourceType) { BatchResourceId batchResourceId = new BatchResourceId(); batchResourceId.setId(thePID.getId().toString()); diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStepTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStepTest.java index 576da3269aa..48b63d094ec 100644 --- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStepTest.java +++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourceAndWriteBinaryStepTest.java @@ -12,6 +12,7 @@ import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.executor.InterceptorService; import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import 
ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; @@ -112,7 +113,7 @@ public class ExpandResourceAndWriteBinaryStepTest { private FhirContext myFhirContext = FhirContext.forR4Cached(); @Spy - private StorageSettings myStorageSettings = new StorageSettings(); + private JpaStorageSettings myStorageSettings = new JpaStorageSettings(); @Spy private IHapiTransactionService myTransactionService = new NonTransactionalHapiTransactionService(); diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStepTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStepTest.java index 5ec874649fe..b1ce4c1d63a 100644 --- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStepTest.java +++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStepTest.java @@ -4,14 +4,14 @@ package ca.uhn.fhir.batch2.jobs.export; import ca.uhn.fhir.batch2.api.IJobDataSink; import ca.uhn.fhir.batch2.api.RunOutcome; import ca.uhn.fhir.batch2.api.StepExecutionDetails; -import ca.uhn.fhir.interceptor.executor.InterceptorService; -import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList; import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList; import ca.uhn.fhir.batch2.jobs.models.BatchResourceId; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.interceptor.executor.InterceptorService; import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.PersistentIdToForcedIdMap; @@ -20,8 +20,8 @@ import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; import 
ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService; import ca.uhn.fhir.jpa.model.dao.JpaPid; -import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.rest.server.SimpleBundleProvider; import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc; @@ -75,7 +75,7 @@ public class ExpandResourcesStepTest { private FhirContext myFhirContext = FhirContext.forR4Cached(); @Spy - private StorageSettings myStorageSettings = new StorageSettings(); + private JpaStorageSettings myStorageSettings = new JpaStorageSettings(); @Spy private IHapiTransactionService myTransactionService = new NonTransactionalHapiTransactionService(); diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStepTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStepTest.java index da80c5bea69..14e717d7ebd 100644 --- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStepTest.java +++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStepTest.java @@ -130,6 +130,7 @@ public class FetchResourceIdsStepTest { .thenReturn(observationIds.iterator()); int maxFileCapacity = 1000; when(myStorageSettings.getBulkExportFileMaximumCapacity()).thenReturn(maxFileCapacity); + when(myStorageSettings.getBulkExportFileMaximumSize()).thenReturn(10000L); // test RunOutcome outcome = myFirstStep.run(input, sink); @@ -191,6 +192,7 @@ public class FetchResourceIdsStepTest { // when int maxFileCapacity = 5; when(myStorageSettings.getBulkExportFileMaximumCapacity()).thenReturn(maxFileCapacity); + when(myStorageSettings.getBulkExportFileMaximumSize()).thenReturn(10000L); for (int i = 0; i <= maxFileCapacity; i++) { JpaPid 
id = JpaPid.fromId((long) i); diff --git a/hapi-fhir-storage-batch2-test-utilities/pom.xml b/hapi-fhir-storage-batch2-test-utilities/pom.xml index fc56dda9e44..ce8ac389e76 100644 --- a/hapi-fhir-storage-batch2-test-utilities/pom.xml +++ b/hapi-fhir-storage-batch2-test-utilities/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml index f48578a48ac..2d86204e222 100644 --- a/hapi-fhir-storage-batch2/pom.xml +++ b/hapi-fhir-storage-batch2/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml index 1c37b3375d2..deec953b4d9 100644 --- a/hapi-fhir-storage-cr/pom.xml +++ b/hapi-fhir-storage-cr/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CanonicalHelper.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CanonicalHelper.java new file mode 100644 index 00000000000..a0eee8f5175 --- /dev/null +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CanonicalHelper.java @@ -0,0 +1,50 @@ +/*- + * #%L + * HAPI FHIR - Clinical Reasoning + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package ca.uhn.fhir.cr.common; + +import ca.uhn.fhir.context.FhirVersionEnum; +import org.hl7.fhir.instance.model.api.IPrimitiveType; + +public class CanonicalHelper { + public static > C getCanonicalType( + FhirVersionEnum fhirVersion, String theCanonical, String theUrl, String theVersion) { + String url = theVersion == null ? theUrl : String.format("%s|%s", theUrl, theVersion); + String canonical = theCanonical == null ? url : theCanonical; + return newCanonicalType(fhirVersion, canonical); + } + + @SuppressWarnings("unchecked") + private static > C newCanonicalType( + FhirVersionEnum fhirVersion, String theCanonical) { + if (theCanonical == null) { + return null; + } + switch (fhirVersion) { + case DSTU3: + return (C) new org.hl7.fhir.dstu3.model.StringType(theCanonical); + case R4: + return (C) new org.hl7.fhir.r4.model.CanonicalType(theCanonical); + case R5: + return (C) new org.hl7.fhir.r5.model.CanonicalType(theCanonical); + default: + return null; + } + } +} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IActivityDefinitionProcessorFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IActivityDefinitionProcessorFactory.java similarity index 88% rename from hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IActivityDefinitionProcessorFactory.java rename to hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IActivityDefinitionProcessorFactory.java index 996d1127b31..5cb284a9873 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IActivityDefinitionProcessorFactory.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IActivityDefinitionProcessorFactory.java @@ -17,10 +17,10 @@ * limitations under the License. 
* #L% */ -package ca.uhn.fhir.cr.r4; +package ca.uhn.fhir.cr.common; import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.opencds.cqf.fhir.cr.activitydefinition.r4.ActivityDefinitionProcessor; +import org.opencds.cqf.fhir.cr.activitydefinition.ActivityDefinitionProcessor; @FunctionalInterface public interface IActivityDefinitionProcessorFactory { diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IPlanDefinitionProcessorFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IPlanDefinitionProcessorFactory.java similarity index 89% rename from hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IPlanDefinitionProcessorFactory.java rename to hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IPlanDefinitionProcessorFactory.java index c370600573d..7822a59a6c7 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IPlanDefinitionProcessorFactory.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IPlanDefinitionProcessorFactory.java @@ -17,10 +17,10 @@ * limitations under the License. 
* #L% */ -package ca.uhn.fhir.cr.r4; +package ca.uhn.fhir.cr.common; import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.opencds.cqf.fhir.cr.plandefinition.r4.PlanDefinitionProcessor; +import org.opencds.cqf.fhir.cr.plandefinition.PlanDefinitionProcessor; @FunctionalInterface public interface IPlanDefinitionProcessorFactory { diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IQuestionnaireProcessorFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IQuestionnaireProcessorFactory.java similarity index 88% rename from hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IQuestionnaireProcessorFactory.java rename to hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IQuestionnaireProcessorFactory.java index d08f284fe8e..e9f1e153030 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IQuestionnaireProcessorFactory.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IQuestionnaireProcessorFactory.java @@ -17,10 +17,10 @@ * limitations under the License. 
* #L% */ -package ca.uhn.fhir.cr.r4; +package ca.uhn.fhir.cr.common; import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.opencds.cqf.fhir.cr.questionnaire.r4.processor.QuestionnaireProcessor; +import org.opencds.cqf.fhir.cr.questionnaire.QuestionnaireProcessor; @FunctionalInterface public interface IQuestionnaireProcessorFactory { diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IQuestionnaireResponseProcessorFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IQuestionnaireResponseProcessorFactory.java similarity index 88% rename from hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IQuestionnaireResponseProcessorFactory.java rename to hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IQuestionnaireResponseProcessorFactory.java index d3e3d3b1719..e6d9ebc87c1 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/IQuestionnaireResponseProcessorFactory.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IQuestionnaireResponseProcessorFactory.java @@ -17,10 +17,10 @@ * limitations under the License. 
* #L% */ -package ca.uhn.fhir.cr.r4; +package ca.uhn.fhir.cr.common; import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.opencds.cqf.fhir.cr.questionnaireresponse.r4.QuestionnaireResponseProcessor; +import org.opencds.cqf.fhir.cr.questionnaireresponse.QuestionnaireResponseProcessor; @FunctionalInterface public interface IQuestionnaireResponseProcessorFactory { diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrProcessorConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProcessorConfig.java similarity index 69% rename from hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrProcessorConfig.java rename to hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProcessorConfig.java index d0d7704958c..a83b10d0b2d 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrProcessorConfig.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProcessorConfig.java @@ -17,7 +17,7 @@ * limitations under the License. 
* #L% */ -package ca.uhn.fhir.cr.config.r4; +package ca.uhn.fhir.cr.config; import ca.uhn.fhir.cr.common.IRepositoryFactory; import org.opencds.cqf.fhir.cql.EvaluationSettings; @@ -27,30 +27,30 @@ import org.springframework.context.annotation.Configuration; @Configuration public class CrProcessorConfig { @Bean - ca.uhn.fhir.cr.r4.IActivityDefinitionProcessorFactory r4ActivityDefinitionProcessorFactory( + ca.uhn.fhir.cr.common.IActivityDefinitionProcessorFactory activityDefinitionProcessorFactory( IRepositoryFactory theRepositoryFactory, EvaluationSettings theEvaluationSettings) { - return rd -> new org.opencds.cqf.fhir.cr.activitydefinition.r4.ActivityDefinitionProcessor( + return rd -> new org.opencds.cqf.fhir.cr.activitydefinition.ActivityDefinitionProcessor( theRepositoryFactory.create(rd), theEvaluationSettings); } @Bean - ca.uhn.fhir.cr.r4.IPlanDefinitionProcessorFactory r4PlanDefinitionProcessorFactory( + ca.uhn.fhir.cr.common.IPlanDefinitionProcessorFactory planDefinitionProcessorFactory( IRepositoryFactory theRepositoryFactory, EvaluationSettings theEvaluationSettings) { - return rd -> new org.opencds.cqf.fhir.cr.plandefinition.r4.PlanDefinitionProcessor( + return rd -> new org.opencds.cqf.fhir.cr.plandefinition.PlanDefinitionProcessor( theRepositoryFactory.create(rd), theEvaluationSettings); } @Bean - ca.uhn.fhir.cr.r4.IQuestionnaireProcessorFactory r4QuestionnaireProcessorFactory( + ca.uhn.fhir.cr.common.IQuestionnaireProcessorFactory questionnaireProcessorFactory( IRepositoryFactory theRepositoryFactory, EvaluationSettings theEvaluationSettings) { - return rd -> new org.opencds.cqf.fhir.cr.questionnaire.r4.processor.QuestionnaireProcessor( + return rd -> new org.opencds.cqf.fhir.cr.questionnaire.QuestionnaireProcessor( theRepositoryFactory.create(rd), theEvaluationSettings); } @Bean - ca.uhn.fhir.cr.r4.IQuestionnaireResponseProcessorFactory r4QuestionnaireResponseProcessorFactory( + ca.uhn.fhir.cr.common.IQuestionnaireResponseProcessorFactory 
questionnaireResponseProcessorFactory( IRepositoryFactory theRepositoryFactory, EvaluationSettings theEvaluationSettings) { - return rd -> new org.opencds.cqf.fhir.cr.questionnaireresponse.r4.QuestionnaireResponseProcessor( + return rd -> new org.opencds.cqf.fhir.cr.questionnaireresponse.QuestionnaireResponseProcessor( theRepositoryFactory.create(rd), theEvaluationSettings); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/ApplyOperationConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/ApplyOperationConfig.java index b8a9adb3d7d..71c16e74a65 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/ApplyOperationConfig.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/ApplyOperationConfig.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.cr.config.dstu3; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.config.CrProcessorConfig; import ca.uhn.fhir.cr.config.ProviderLoader; import ca.uhn.fhir.cr.config.ProviderSelector; import ca.uhn.fhir.rest.server.RestfulServer; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/CrProcessorConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/CrProcessorConfig.java deleted file mode 100644 index 1ffd8985a89..00000000000 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/CrProcessorConfig.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * #%L - * HAPI FHIR - Clinical Reasoning - * %% - * Copyright (C) 2014 - 2024 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ -package ca.uhn.fhir.cr.config.dstu3; - -import ca.uhn.fhir.cr.common.IRepositoryFactory; -import org.opencds.cqf.fhir.cql.EvaluationSettings; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class CrProcessorConfig { - @Bean - ca.uhn.fhir.cr.dstu3.IActivityDefinitionProcessorFactory dstu3ActivityDefinitionProcessorFactory( - IRepositoryFactory theRepositoryFactory, EvaluationSettings theEvaluationSettings) { - return rd -> new org.opencds.cqf.fhir.cr.activitydefinition.dstu3.ActivityDefinitionProcessor( - theRepositoryFactory.create(rd), theEvaluationSettings); - } - - @Bean - ca.uhn.fhir.cr.dstu3.IPlanDefinitionProcessorFactory dstu3PlanDefinitionProcessorFactory( - IRepositoryFactory theRepositoryFactory, EvaluationSettings theEvaluationSettings) { - return rd -> new org.opencds.cqf.fhir.cr.plandefinition.dstu3.PlanDefinitionProcessor( - theRepositoryFactory.create(rd), theEvaluationSettings); - } - - @Bean - ca.uhn.fhir.cr.dstu3.IQuestionnaireProcessorFactory dstu3QuestionnaireProcessorFactory( - IRepositoryFactory theRepositoryFactory, EvaluationSettings theEvaluationSettings) { - return rd -> new org.opencds.cqf.fhir.cr.questionnaire.dstu3.processor.QuestionnaireProcessor( - theRepositoryFactory.create(rd), theEvaluationSettings); - } - - @Bean - ca.uhn.fhir.cr.dstu3.IQuestionnaireResponseProcessorFactory dstu3QuestionnaireResponseProcessorFactory( - IRepositoryFactory theRepositoryFactory, EvaluationSettings theEvaluationSettings) { - 
return rd -> new org.opencds.cqf.fhir.cr.questionnaireresponse.dstu3.QuestionnaireResponseProcessor( - theRepositoryFactory.create(rd), theEvaluationSettings); - } -} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/ExtractOperationConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/ExtractOperationConfig.java index e58a5305c2f..2ea4d039801 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/ExtractOperationConfig.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/ExtractOperationConfig.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.cr.config.dstu3; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.config.CrProcessorConfig; import ca.uhn.fhir.cr.config.ProviderLoader; import ca.uhn.fhir.cr.config.ProviderSelector; import ca.uhn.fhir.rest.server.RestfulServer; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/PackageOperationConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/PackageOperationConfig.java index 5fb0c3a0345..8be36b6777e 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/PackageOperationConfig.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/PackageOperationConfig.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.cr.config.dstu3; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.config.CrProcessorConfig; import ca.uhn.fhir.cr.config.ProviderLoader; import ca.uhn.fhir.cr.config.ProviderSelector; import ca.uhn.fhir.rest.server.RestfulServer; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/PopulateOperationConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/PopulateOperationConfig.java index a60f57d594c..eedd452b5ab 100644 --- 
a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/PopulateOperationConfig.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/PopulateOperationConfig.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.cr.config.dstu3; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.config.CrProcessorConfig; import ca.uhn.fhir.cr.config.ProviderLoader; import ca.uhn.fhir.cr.config.ProviderSelector; import ca.uhn.fhir.rest.server.RestfulServer; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/QuestionnaireOperationConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/QuestionnaireOperationConfig.java new file mode 100644 index 00000000000..0e25fe57e09 --- /dev/null +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/dstu3/QuestionnaireOperationConfig.java @@ -0,0 +1,53 @@ +/*- + * #%L + * HAPI FHIR - Clinical Reasoning + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.cr.config.dstu3; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.config.ProviderLoader; +import ca.uhn.fhir.cr.config.ProviderSelector; +import ca.uhn.fhir.rest.server.RestfulServer; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Bean; + +import java.util.Arrays; +import java.util.Map; + +public class QuestionnaireOperationConfig { + @Bean + ca.uhn.fhir.cr.dstu3.structuredefinition.StructureDefinitionQuestionnaireProvider + dstu3StructureDefinitionQuestionnaireProvider() { + return new ca.uhn.fhir.cr.dstu3.structuredefinition.StructureDefinitionQuestionnaireProvider(); + } + + @Bean(name = "questionnaireOperationLoader") + public ProviderLoader questionnaireOperationLoader( + ApplicationContext theApplicationContext, FhirContext theFhirContext, RestfulServer theRestfulServer) { + var selector = new ProviderSelector( + theFhirContext, + Map.of( + FhirVersionEnum.DSTU3, + Arrays.asList( + ca.uhn.fhir.cr.dstu3.structuredefinition.StructureDefinitionQuestionnaireProvider + .class))); + + return new ProviderLoader(theRestfulServer, theApplicationContext, selector); + } +} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/ApplyOperationConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/ApplyOperationConfig.java index d67f7dc3963..2c859695c0b 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/ApplyOperationConfig.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/ApplyOperationConfig.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.cr.config.r4; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.config.CrProcessorConfig; import ca.uhn.fhir.cr.config.ProviderLoader; import ca.uhn.fhir.cr.config.ProviderSelector; import ca.uhn.fhir.rest.server.RestfulServer; diff --git 
a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrR4Config.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrR4Config.java index 716055860e1..f08743d7909 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrR4Config.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/CrR4Config.java @@ -101,7 +101,6 @@ public class CrR4Config { theCareGapsProperties, theRepositoryFactory.create(rd), theMeasureEvaluationOptions, - theExecutor, rd.getFhirServerBase()); } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/ExtractOperationConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/ExtractOperationConfig.java index b2a4f04fd64..a2be7b791f5 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/ExtractOperationConfig.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/ExtractOperationConfig.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.cr.config.r4; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.config.CrProcessorConfig; import ca.uhn.fhir.cr.config.ProviderLoader; import ca.uhn.fhir.cr.config.ProviderSelector; import ca.uhn.fhir.rest.server.RestfulServer; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/PackageOperationConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/PackageOperationConfig.java index f02d092c417..9f492e095f9 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/PackageOperationConfig.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/PackageOperationConfig.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.cr.config.r4; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.config.CrProcessorConfig; import ca.uhn.fhir.cr.config.ProviderLoader; import ca.uhn.fhir.cr.config.ProviderSelector; import 
ca.uhn.fhir.rest.server.RestfulServer; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/PopulateOperationConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/PopulateOperationConfig.java index ea91d88a727..ad7fdcd85c1 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/PopulateOperationConfig.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/PopulateOperationConfig.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.cr.config.r4; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.config.CrProcessorConfig; import ca.uhn.fhir.cr.config.ProviderLoader; import ca.uhn.fhir.cr.config.ProviderSelector; import ca.uhn.fhir.rest.server.RestfulServer; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/QuestionnaireOperationConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/QuestionnaireOperationConfig.java new file mode 100644 index 00000000000..12ee29eeb63 --- /dev/null +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/r4/QuestionnaireOperationConfig.java @@ -0,0 +1,52 @@ +/*- + * #%L + * HAPI FHIR - Clinical Reasoning + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.cr.config.r4; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.config.ProviderLoader; +import ca.uhn.fhir.cr.config.ProviderSelector; +import ca.uhn.fhir.rest.server.RestfulServer; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Bean; + +import java.util.Arrays; +import java.util.Map; + +public class QuestionnaireOperationConfig { + @Bean + ca.uhn.fhir.cr.r4.structuredefinition.StructureDefinitionQuestionnaireProvider + r4StructureDefinitionQuestionnaireProvider() { + return new ca.uhn.fhir.cr.r4.structuredefinition.StructureDefinitionQuestionnaireProvider(); + } + + @Bean(name = "questionnaireOperationLoader") + public ProviderLoader questionnaireOperationLoader( + ApplicationContext theApplicationContext, FhirContext theFhirContext, RestfulServer theRestfulServer) { + var selector = new ProviderSelector( + theFhirContext, + Map.of( + FhirVersionEnum.R4, + Arrays.asList( + ca.uhn.fhir.cr.r4.structuredefinition.StructureDefinitionQuestionnaireProvider.class))); + + return new ProviderLoader(theRestfulServer, theApplicationContext, selector); + } +} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IActivityDefinitionProcessorFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IActivityDefinitionProcessorFactory.java deleted file mode 100644 index e57856900c0..00000000000 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IActivityDefinitionProcessorFactory.java +++ /dev/null @@ -1,28 +0,0 @@ -/*- - * #%L - * HAPI FHIR - Clinical Reasoning - * %% - * Copyright (C) 2014 - 2024 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ -package ca.uhn.fhir.cr.dstu3; - -import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.opencds.cqf.fhir.cr.activitydefinition.dstu3.ActivityDefinitionProcessor; - -@FunctionalInterface -public interface IActivityDefinitionProcessorFactory { - ActivityDefinitionProcessor create(RequestDetails theRequestDetails); -} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IPlanDefinitionProcessorFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IPlanDefinitionProcessorFactory.java deleted file mode 100644 index d4494e8d908..00000000000 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IPlanDefinitionProcessorFactory.java +++ /dev/null @@ -1,28 +0,0 @@ -/*- - * #%L - * HAPI FHIR - Clinical Reasoning - * %% - * Copyright (C) 2014 - 2024 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ -package ca.uhn.fhir.cr.dstu3; - -import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.opencds.cqf.fhir.cr.plandefinition.dstu3.PlanDefinitionProcessor; - -@FunctionalInterface -public interface IPlanDefinitionProcessorFactory { - PlanDefinitionProcessor create(RequestDetails theRequestDetails); -} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IQuestionnaireProcessorFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IQuestionnaireProcessorFactory.java deleted file mode 100644 index 68ea8e1c023..00000000000 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IQuestionnaireProcessorFactory.java +++ /dev/null @@ -1,28 +0,0 @@ -/*- - * #%L - * HAPI FHIR - Clinical Reasoning - * %% - * Copyright (C) 2014 - 2024 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ -package ca.uhn.fhir.cr.dstu3; - -import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.opencds.cqf.fhir.cr.questionnaire.dstu3.processor.QuestionnaireProcessor; - -@FunctionalInterface -public interface IQuestionnaireProcessorFactory { - QuestionnaireProcessor create(RequestDetails theRequestDetails); -} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IQuestionnaireResponseProcessorFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IQuestionnaireResponseProcessorFactory.java deleted file mode 100644 index e699630f7dd..00000000000 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/IQuestionnaireResponseProcessorFactory.java +++ /dev/null @@ -1,28 +0,0 @@ -/*- - * #%L - * HAPI FHIR - Clinical Reasoning - * %% - * Copyright (C) 2014 - 2024 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ -package ca.uhn.fhir.cr.dstu3; - -import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.opencds.cqf.fhir.cr.questionnaireresponse.dstu3.QuestionnaireResponseProcessor; - -@FunctionalInterface -public interface IQuestionnaireResponseProcessorFactory { - QuestionnaireResponseProcessor create(RequestDetails theRequestDetails); -} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/activitydefinition/ActivityDefinitionApplyProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/activitydefinition/ActivityDefinitionApplyProvider.java index 6940cd1410c..3bcc5beabec 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/activitydefinition/ActivityDefinitionApplyProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/activitydefinition/ActivityDefinitionApplyProvider.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.cr.dstu3.activitydefinition; * #L% */ -import ca.uhn.fhir.cr.dstu3.IActivityDefinitionProcessorFactory; +import ca.uhn.fhir.cr.common.IActivityDefinitionProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; @@ -37,13 +37,14 @@ import org.hl7.fhir.dstu3.model.Parameters; import org.hl7.fhir.dstu3.model.StringType; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.opencds.cqf.fhir.utility.monad.Eithers; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component public class ActivityDefinitionApplyProvider { @Autowired - IActivityDefinitionProcessorFactory myDstu3ActivityDefinitionProcessorFactory; + IActivityDefinitionProcessorFactory myActivityDefinitionProcessorFactory; /** * Implements the . 
* * @param theId The id of the PlanDefinition to apply - * @param theCanonical The canonical identifier for the PlanDefinition to apply (optionally version-specific) * @param thePlanDefinition The PlanDefinition to be applied + * @param theCanonical The canonical url of the plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter (and optionally the version), or the planDefinition parameter must be supplied. + * @param theUrl Canonical URL of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. + * @param theVersion Version of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. * @param theSubject The subject(s) that is/are the target of the plan definition to be applied. * @param theEncounter The encounter in context * @param thePractitioner The practitioner in context @@ -82,8 +88,10 @@ public class PlanDefinitionApplyProvider { @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = PlanDefinition.class) public IBaseResource apply( @IdParam IdType theId, + @OperationParam(name = "planDefinition") org.hl7.fhir.r4.model.PlanDefinition thePlanDefinition, @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, @OperationParam(name = "subject") String theSubject, @OperationParam(name = "encounter") String theEncounter, @OperationParam(name = "practitioner") String thePractitioner, @@ -101,12 +109,11 @@ public class PlanDefinitionApplyProvider { @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { 
- return myDstu3PlanDefinitionProcessorFactory + StringType canonicalType = getCanonicalType(FhirVersionEnum.DSTU3, theCanonical, theUrl, theVersion); + return myPlanDefinitionProcessorFactory .create(theRequestDetails) .apply( - theId, - new StringType(theCanonical), - thePlanDefinition, + Eithers.for3(canonicalType, theId, thePlanDefinition), theSubject, theEncounter, thePractitioner, @@ -117,7 +124,7 @@ public class PlanDefinitionApplyProvider { theSetting, theSettingContext, theParameters, - theUseServerData == null ? true : theUseServerData.booleanValue(), + theUseServerData == null ? Boolean.TRUE : theUseServerData.booleanValue(), theData, null, theDataEndpoint, @@ -127,8 +134,10 @@ public class PlanDefinitionApplyProvider { @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = PlanDefinition.class) public IBaseResource apply( + @OperationParam(name = "planDefinition") org.hl7.fhir.r4.model.PlanDefinition thePlanDefinition, @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, @OperationParam(name = "subject") String theSubject, @OperationParam(name = "encounter") String theEncounter, @OperationParam(name = "practitioner") String thePractitioner, @@ -146,12 +155,11 @@ public class PlanDefinitionApplyProvider { @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return myDstu3PlanDefinitionProcessorFactory + StringType canonicalType = getCanonicalType(FhirVersionEnum.DSTU3, theCanonical, theUrl, theVersion); + return myPlanDefinitionProcessorFactory .create(theRequestDetails) .apply( - null, - new StringType(theCanonical), - thePlanDefinition, + Eithers.for3(canonicalType, null, thePlanDefinition), theSubject, theEncounter, thePractitioner, @@ 
-162,7 +170,7 @@ public class PlanDefinitionApplyProvider { theSetting, theSettingContext, theParameters, - theUseServerData == null ? true : theUseServerData.booleanValue(), + theUseServerData == null ? Boolean.TRUE : theUseServerData.booleanValue(), theData, null, theDataEndpoint, diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionPackageProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionPackageProvider.java index a2666879b50..fee4cc6f240 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionPackageProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionPackageProvider.java @@ -19,49 +19,61 @@ */ package ca.uhn.fhir.cr.dstu3.plandefinition; -import ca.uhn.fhir.cr.dstu3.IPlanDefinitionProcessorFactory; +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.common.IPlanDefinitionProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import org.hl7.fhir.dstu3.model.BooleanType; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.PlanDefinition; import org.hl7.fhir.dstu3.model.StringType; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.instance.model.api.IBaseBundle; +import org.opencds.cqf.fhir.utility.monad.Eithers; import org.springframework.beans.factory.annotation.Autowired; +import static ca.uhn.fhir.cr.common.CanonicalHelper.getCanonicalType; + public class PlanDefinitionPackageProvider { @Autowired - IPlanDefinitionProcessorFactory mydstu3PlanDefinitionProcessorFactory; + IPlanDefinitionProcessorFactory myPlanDefinitionProcessorFactory; 
@Operation(name = ProviderConstants.CR_OPERATION_PACKAGE, idempotent = true, type = PlanDefinition.class) public IBaseBundle packagePlanDefinition( @IdParam IdType theId, @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "usePut") String theIsPut, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, + @OperationParam(name = "usePut") BooleanType theIsPut, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return mydstu3PlanDefinitionProcessorFactory + StringType canonicalType = getCanonicalType(FhirVersionEnum.DSTU3, theCanonical, theUrl, theVersion); + return myPlanDefinitionProcessorFactory .create(theRequestDetails) - .packagePlanDefinition(theId, new StringType(theCanonical), null, Boolean.parseBoolean(theIsPut)); + .packagePlanDefinition( + Eithers.for3(canonicalType, theId, null), + theIsPut == null ? Boolean.FALSE : theIsPut.booleanValue()); } @Operation(name = ProviderConstants.CR_OPERATION_PACKAGE, idempotent = true, type = PlanDefinition.class) public IBaseBundle packagePlanDefinition( @OperationParam(name = "id") String theId, @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "usePut") String theIsPut, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, + @OperationParam(name = "usePut") BooleanType theIsPut, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return mydstu3PlanDefinitionProcessorFactory + IdType id = theId == null ? 
null : new IdType("PlanDefinition", theId); + StringType canonicalType = getCanonicalType(FhirVersionEnum.DSTU3, theCanonical, theUrl, theVersion); + return myPlanDefinitionProcessorFactory .create(theRequestDetails) .packagePlanDefinition( - new IdType("PlanDefinition", theId), - new StringType(theCanonical), - null, - Boolean.parseBoolean(theIsPut)); + Eithers.for3(canonicalType, id, null), + theIsPut == null ? Boolean.FALSE : theIsPut.booleanValue()); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaire/QuestionnairePackageProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaire/QuestionnairePackageProvider.java index 26fb7516283..1e3b33b4850 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaire/QuestionnairePackageProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaire/QuestionnairePackageProvider.java @@ -19,7 +19,8 @@ */ package ca.uhn.fhir.cr.dstu3.questionnaire; -import ca.uhn.fhir.cr.dstu3.IQuestionnaireProcessorFactory; +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.common.IQuestionnaireProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; @@ -29,11 +30,14 @@ import org.hl7.fhir.dstu3.model.Bundle; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.Questionnaire; import org.hl7.fhir.dstu3.model.StringType; +import org.opencds.cqf.fhir.utility.monad.Eithers; import org.springframework.beans.factory.annotation.Autowired; +import static ca.uhn.fhir.cr.common.CanonicalHelper.getCanonicalType; + public class QuestionnairePackageProvider { @Autowired - IQuestionnaireProcessorFactory myDstu3QuestionnaireProcessorFactory; + IQuestionnaireProcessorFactory myQuestionnaireProcessorFactory; /** * Implements a $package operation following the Structured Data Capture (SDC) IG. 
* * @param theId The id of the Questionnaire to populate. - * @param theCanonical The canonical identifier for the questionnaire (optionally version-specific). * @param theQuestionnaire The Questionnaire to populate. Used when the operation is invoked at the 'type' level. + * @param theCanonical The canonical identifier for the questionnaire (optionally version-specific). + * @param theUrl Canonical URL of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. + * @param theVersion Version of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. * @param theSubject The subject(s) that is/are the target of the Questionnaire. * @param theParameters Any input parameters defined in libraries referenced by the Questionnaire. - * @param theBundle Data to be made available during CQL evaluation. + * @param theData Data to be made available during CQL evaluation. + * @param theUseServerData Whether to use data from the server performing the evaluation. * @param theDataEndpoint An endpoint to use to access data referenced by retrieve operations in libraries * referenced by the Questionnaire. * @param theContentEndpoint An endpoint to use to access content (i.e. libraries) referenced by the Questionnaire. 
@@ -141,25 +158,28 @@ public class QuestionnairePopulateProvider { @Operation(name = ProviderConstants.CR_OPERATION_POPULATE, idempotent = true, type = Questionnaire.class) public QuestionnaireResponse populate( @IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, @OperationParam(name = "subject") String theSubject, @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "bundle") Bundle theBundle, + @OperationParam(name = "data") Bundle theData, + @OperationParam(name = "useServerData") BooleanType theUseServerData, @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return (QuestionnaireResponse) myDstu3QuestionnaireProcessorFactory + StringType canonicalType = getCanonicalType(FhirVersionEnum.DSTU3, theCanonical, theUrl, theVersion); + return (QuestionnaireResponse) myQuestionnaireProcessorFactory .create(theRequestDetails) .populate( - theId, - new StringType(theCanonical), - theQuestionnaire, + Eithers.for3(canonicalType, theId, theQuestionnaire), theSubject, theParameters, - theBundle, + theData, + theUseServerData == null ? 
Boolean.TRUE : theUseServerData.booleanValue(), theDataEndpoint, theContentEndpoint, theTerminologyEndpoint); @@ -167,25 +187,28 @@ public class QuestionnairePopulateProvider { @Operation(name = ProviderConstants.CR_OPERATION_POPULATE, idempotent = true, type = Questionnaire.class) public QuestionnaireResponse populate( - @OperationParam(name = "canonical") String theCanonical, @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, @OperationParam(name = "subject") String theSubject, @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "bundle") Bundle theBundle, + @OperationParam(name = "data") Bundle theData, + @OperationParam(name = "useServerData") BooleanType theUseServerData, @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return (QuestionnaireResponse) myDstu3QuestionnaireProcessorFactory + StringType canonicalType = getCanonicalType(FhirVersionEnum.DSTU3, theCanonical, theUrl, theVersion); + return (QuestionnaireResponse) myQuestionnaireProcessorFactory .create(theRequestDetails) .populate( - null, - new StringType(theCanonical), - theQuestionnaire, + Eithers.for3(canonicalType, null, theQuestionnaire), theSubject, theParameters, - theBundle, + theData, + theUseServerData == null ? 
Boolean.TRUE : theUseServerData.booleanValue(), theDataEndpoint, theContentEndpoint, theTerminologyEndpoint); diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaireresponse/QuestionnaireResponseExtractProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaireresponse/QuestionnaireResponseExtractProvider.java index bca366e0f8e..73cd076ef7c 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaireresponse/QuestionnaireResponseExtractProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaireresponse/QuestionnaireResponseExtractProvider.java @@ -20,31 +20,35 @@ package ca.uhn.fhir.cr.dstu3.questionnaireresponse; * #L% */ -import ca.uhn.fhir.cr.dstu3.IQuestionnaireResponseProcessorFactory; +import ca.uhn.fhir.cr.common.IQuestionnaireResponseProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import org.hl7.fhir.dstu3.model.Bundle; import org.hl7.fhir.dstu3.model.IdType; +import org.hl7.fhir.dstu3.model.Parameters; import org.hl7.fhir.dstu3.model.QuestionnaireResponse; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.instance.model.api.IBaseBundle; +import org.opencds.cqf.fhir.utility.monad.Eithers; import org.springframework.beans.factory.annotation.Autowired; public class QuestionnaireResponseExtractProvider { @Autowired - IQuestionnaireResponseProcessorFactory myDstu3QuestionnaireResponseProcessorFactory; + IQuestionnaireResponseProcessorFactory myQuestionnaireResponseProcessorFactory; /** - * Implements the $extract + * Implements the Structured Data Capture (SDC) IG. * * @param theId The id of the QuestionnaireResponse to extract data from. 
* @param theQuestionnaireResponse The QuestionnaireResponse to extract data from. Used when the operation is invoked at the 'type' level. + * @param theParameters Any input parameters defined in libraries referenced by the Questionnaire. + * @param theData Data to be made available during CQL evaluation. * @param theRequestDetails The details (such as tenant) of this request. Usually * autopopulated HAPI. * @return The resulting FHIR resource produced after extracting data. This will either be a single resource or a Transaction Bundle that contains multiple resources. @@ -53,20 +57,24 @@ public class QuestionnaireResponseExtractProvider { public IBaseBundle extract( @IdParam IdType theId, @OperationParam(name = "questionnaire-response") QuestionnaireResponse theQuestionnaireResponse, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "data") Bundle theData, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return myDstu3QuestionnaireResponseProcessorFactory + return myQuestionnaireResponseProcessorFactory .create(theRequestDetails) - .extract(theId, theQuestionnaireResponse, null, null, null); + .extract(Eithers.for2(theId, theQuestionnaireResponse), theParameters, theData); } @Operation(name = ProviderConstants.CR_OPERATION_EXTRACT, idempotent = true, type = QuestionnaireResponse.class) public IBaseBundle extract( @OperationParam(name = "questionnaire-response") QuestionnaireResponse theQuestionnaireResponse, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "data") Bundle theData, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return myDstu3QuestionnaireResponseProcessorFactory + return myQuestionnaireResponseProcessorFactory .create(theRequestDetails) - .extract(null, theQuestionnaireResponse, null, null, null); + .extract(Eithers.for2(null, theQuestionnaireResponse), theParameters, theData); } } diff --git 
a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/structuredefinition/StructureDefinitionQuestionnaireProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/structuredefinition/StructureDefinitionQuestionnaireProvider.java new file mode 100644 index 00000000000..a8cda85301e --- /dev/null +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/structuredefinition/StructureDefinitionQuestionnaireProvider.java @@ -0,0 +1,138 @@ +/*- + * #%L + * HAPI FHIR - Clinical Reasoning + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.cr.dstu3.structuredefinition; + +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.common.IQuestionnaireProcessorFactory; +import ca.uhn.fhir.rest.annotation.IdParam; +import ca.uhn.fhir.rest.annotation.Operation; +import ca.uhn.fhir.rest.annotation.OperationParam; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import org.hl7.fhir.dstu3.model.BooleanType; +import org.hl7.fhir.dstu3.model.Bundle; +import org.hl7.fhir.dstu3.model.Endpoint; +import org.hl7.fhir.dstu3.model.IdType; +import org.hl7.fhir.dstu3.model.Parameters; +import org.hl7.fhir.dstu3.model.Questionnaire; +import org.hl7.fhir.dstu3.model.StringType; +import org.hl7.fhir.dstu3.model.StructureDefinition; +import org.opencds.cqf.fhir.utility.monad.Eithers; +import org.springframework.beans.factory.annotation.Autowired; + +import static ca.uhn.fhir.cr.common.CanonicalHelper.getCanonicalType; + +public class StructureDefinitionQuestionnaireProvider { + @Autowired + IQuestionnaireProcessorFactory myQuestionnaireProcessorFactory; + + /** + * Implements the $populate + * operation found in the + * Structured Data Capture (SDC) IG. + * + * @param theId The id of the StructureDefinition. + * @param theProfile The StructureDefinition to base the Questionnaire on. Used when the operation is invoked at the 'type' level. + * @param theCanonical The canonical identifier for the StructureDefinition (optionally version-specific). + * @param theUrl Canonical URL of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. + * @param theVersion Version of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. 
+ * @param theSupportedOnly If true (default: false), the questionnaire will only include those elements marked as "mustSupport='true'" in the StructureDefinition. + * @param theRequiredOnly If true (default: false), the questionnaire will only include those elements marked as "min>0" in the StructureDefinition. + * @param theSubject The subject(s) that is/are the target of the Questionnaire. + * @param theParameters Any input parameters defined in libraries referenced by the StructureDefinition. + * @param theUseServerData Whether to use data from the server performing the evaluation. + * @param theData Data to be made available during CQL evaluation. + * @param theDataEndpoint An endpoint to use to access data referenced by retrieve operations in libraries + * referenced by the StructureDefinition. + * @param theContentEndpoint An endpoint to use to access content (i.e. libraries) referenced by the StructureDefinition. + * @param theTerminologyEndpoint An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) + * referenced by the StructureDefinition. + * @param theRequestDetails The details (such as tenant) of this request. Usually + * autopopulated HAPI. + * @return The questionnaire form generated based on the StructureDefinition. 
+ */ + @Operation(name = ProviderConstants.CR_OPERATION_QUESTIONNAIRE, idempotent = true, type = StructureDefinition.class) + public Questionnaire questionnaire( + @IdParam IdType theId, + @OperationParam(name = "profile") StructureDefinition theProfile, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, + @OperationParam(name = "supportedOnly") BooleanType theSupportedOnly, + @OperationParam(name = "requiredOnly") BooleanType theRequiredOnly, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "useServerData") BooleanType theUseServerData, + @OperationParam(name = "data") Bundle theData, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) { + StringType canonicalType = getCanonicalType(FhirVersionEnum.DSTU3, theCanonical, theUrl, theVersion); + return (Questionnaire) myQuestionnaireProcessorFactory + .create(theRequestDetails) + .generateQuestionnaire( + Eithers.for3(canonicalType, theId, theProfile), + theSupportedOnly == null ? Boolean.FALSE : theSupportedOnly.booleanValue(), + theRequiredOnly == null ? Boolean.FALSE : theRequiredOnly.booleanValue(), + theSubject, + theParameters, + theData, + theUseServerData == null ? 
Boolean.TRUE : theUseServerData.booleanValue(), + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint, + null); + } + + @Operation(name = ProviderConstants.CR_OPERATION_QUESTIONNAIRE, idempotent = true, type = StructureDefinition.class) + public Questionnaire questionnaire( + @OperationParam(name = "profile") StructureDefinition theProfile, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, + @OperationParam(name = "supportedOnly") BooleanType theSupportedOnly, + @OperationParam(name = "requiredOnly") BooleanType theRequiredOnly, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "useServerData") BooleanType theUseServerData, + @OperationParam(name = "data") Bundle theData, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) { + StringType canonicalType = getCanonicalType(FhirVersionEnum.DSTU3, theCanonical, theUrl, theVersion); + return (Questionnaire) myQuestionnaireProcessorFactory + .create(theRequestDetails) + .generateQuestionnaire( + Eithers.for3(canonicalType, null, theProfile), + theSupportedOnly == null ? Boolean.FALSE : theSupportedOnly.booleanValue(), + theRequiredOnly == null ? Boolean.FALSE : theRequiredOnly.booleanValue(), + theSubject, + theParameters, + theData, + theUseServerData == null ? 
Boolean.TRUE : theUseServerData.booleanValue(), + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint, + null); + } +} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/activitydefinition/ActivityDefinitionApplyProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/activitydefinition/ActivityDefinitionApplyProvider.java index 2bd2c8acf5d..eefa18ce1e9 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/activitydefinition/ActivityDefinitionApplyProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/activitydefinition/ActivityDefinitionApplyProvider.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.cr.r4.activitydefinition; * #L% */ -import ca.uhn.fhir.cr.r4.IActivityDefinitionProcessorFactory; +import ca.uhn.fhir.cr.common.IActivityDefinitionProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; @@ -29,14 +29,22 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.r4.model.*; +import org.hl7.fhir.r4.model.ActivityDefinition; +import org.hl7.fhir.r4.model.BooleanType; +import org.hl7.fhir.r4.model.Bundle; +import org.hl7.fhir.r4.model.CanonicalType; +import org.hl7.fhir.r4.model.CodeableConcept; +import org.hl7.fhir.r4.model.Endpoint; +import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.Parameters; +import org.opencds.cqf.fhir.utility.monad.Eithers; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component public class ActivityDefinitionApplyProvider { @Autowired - IActivityDefinitionProcessorFactory myR4ActivityDefinitionProcessorFactory; + IActivityDefinitionProcessorFactory myActivityDefinitionProcessorFactory; /** * 
Implements the . * * @param theId The id of the PlanDefinition to apply - * @param theCanonical The canonical identifier for the PlanDefinition to apply (optionally version-specific) * @param thePlanDefinition The PlanDefinition to be applied + * @param theCanonical The canonical url of the plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter (and optionally the version), or the planDefinition parameter must be supplied. + * @param theUrl Canonical URL of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. + * @param theVersion Version of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. * @param theSubject The subject(s) that is/are the target of the plan definition to be applied. * @param theEncounter The encounter in context * @param thePractitioner The practitioner in context @@ -77,8 +90,10 @@ public class PlanDefinitionApplyProvider { @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = PlanDefinition.class) public IBaseResource apply( @IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, @OperationParam(name = "subject") String theSubject, @OperationParam(name = "encounter") String theEncounter, @OperationParam(name = "practitioner") String thePractitioner, @@ -96,12 +111,11 @@ public class PlanDefinitionApplyProvider { @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return 
myR4PlanDefinitionProcessorFactory + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + return myPlanDefinitionProcessorFactory .create(theRequestDetails) .apply( - theId, - new CanonicalType(theCanonical), - thePlanDefinition, + Eithers.for3(canonicalType, theId, thePlanDefinition), theSubject, theEncounter, thePractitioner, @@ -112,7 +126,7 @@ public class PlanDefinitionApplyProvider { theSetting, theSettingContext, theParameters, - theUseServerData == null ? true : theUseServerData.booleanValue(), + theUseServerData == null ? Boolean.TRUE : theUseServerData.booleanValue(), theData, null, theDataEndpoint, @@ -122,8 +136,10 @@ public class PlanDefinitionApplyProvider { @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = PlanDefinition.class) public IBaseResource apply( - @OperationParam(name = "canonical") String theCanonical, @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, @OperationParam(name = "subject") String theSubject, @OperationParam(name = "encounter") String theEncounter, @OperationParam(name = "practitioner") String thePractitioner, @@ -141,12 +157,11 @@ public class PlanDefinitionApplyProvider { @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return myR4PlanDefinitionProcessorFactory + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + return myPlanDefinitionProcessorFactory .create(theRequestDetails) .apply( - null, - new CanonicalType(theCanonical), - thePlanDefinition, + Eithers.for3(canonicalType, null, thePlanDefinition), theSubject, theEncounter, thePractitioner, @@ -157,7 +172,7 @@ public class 
PlanDefinitionApplyProvider { theSetting, theSettingContext, theParameters, - theUseServerData == null ? true : theUseServerData.booleanValue(), + theUseServerData == null ? Boolean.TRUE : theUseServerData.booleanValue(), theData, null, theDataEndpoint, @@ -175,8 +190,10 @@ public class PlanDefinitionApplyProvider { * CPG IG. This implementation follows the R5 specification and returns a bundle of RequestGroups rather than a CarePlan. * * @param theId The id of the PlanDefinition to apply - * @param theCanonical The canonical identifier for the PlanDefinition to apply (optionally version-specific) * @param thePlanDefinition The PlanDefinition to be applied + * @param theCanonical The canonical url of the plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter (and optionally the version), or the planDefinition parameter must be supplied. + * @param theUrl Canonical URL of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. + * @param theVersion Version of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. * @param theSubject The subject(s) that is/are the target of the plan definition to be applied. 
* @param theEncounter The encounter in context * @param thePractitioner The practitioner in context @@ -204,8 +221,10 @@ public class PlanDefinitionApplyProvider { @Operation(name = ProviderConstants.CR_OPERATION_R5_APPLY, idempotent = true, type = PlanDefinition.class) public IBaseResource applyR5( @IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, @OperationParam(name = "subject") String theSubject, @OperationParam(name = "encounter") String theEncounter, @OperationParam(name = "practitioner") String thePractitioner, @@ -223,12 +242,11 @@ public class PlanDefinitionApplyProvider { @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return myR4PlanDefinitionProcessorFactory + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + return myPlanDefinitionProcessorFactory .create(theRequestDetails) .applyR5( - theId, - new CanonicalType(theCanonical), - thePlanDefinition, + Eithers.for3(canonicalType, theId, thePlanDefinition), theSubject, theEncounter, thePractitioner, @@ -239,7 +257,7 @@ public class PlanDefinitionApplyProvider { theSetting, theSettingContext, theParameters, - theUseServerData == null ? true : theUseServerData.booleanValue(), + theUseServerData == null ? 
Boolean.TRUE : theUseServerData.booleanValue(), theData, null, theDataEndpoint, @@ -249,8 +267,10 @@ public class PlanDefinitionApplyProvider { @Operation(name = ProviderConstants.CR_OPERATION_R5_APPLY, idempotent = true, type = PlanDefinition.class) public IBaseResource applyR5( - @OperationParam(name = "canonical") String theCanonical, @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, @OperationParam(name = "subject") String theSubject, @OperationParam(name = "encounter") String theEncounter, @OperationParam(name = "practitioner") String thePractitioner, @@ -268,12 +288,11 @@ public class PlanDefinitionApplyProvider { @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return myR4PlanDefinitionProcessorFactory + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + return myPlanDefinitionProcessorFactory .create(theRequestDetails) .applyR5( - null, - new CanonicalType(theCanonical), - thePlanDefinition, + Eithers.for3(canonicalType, null, thePlanDefinition), theSubject, theEncounter, thePractitioner, @@ -284,7 +303,7 @@ public class PlanDefinitionApplyProvider { theSetting, theSettingContext, theParameters, - theUseServerData == null ? true : theUseServerData.booleanValue(), + theUseServerData == null ? 
Boolean.TRUE : theUseServerData.booleanValue(), theData, null, theDataEndpoint, diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/plandefinition/PlanDefinitionPackageProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/plandefinition/PlanDefinitionPackageProvider.java index 8816a844b19..1edbae83f60 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/plandefinition/PlanDefinitionPackageProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/plandefinition/PlanDefinitionPackageProvider.java @@ -19,7 +19,8 @@ */ package ca.uhn.fhir.cr.r4.plandefinition; -import ca.uhn.fhir.cr.r4.IPlanDefinitionProcessorFactory; +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.common.IPlanDefinitionProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; @@ -28,40 +29,51 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.instance.model.api.IBaseBundle; +import org.hl7.fhir.r4.model.BooleanType; import org.hl7.fhir.r4.model.CanonicalType; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.PlanDefinition; +import org.opencds.cqf.fhir.utility.monad.Eithers; import org.springframework.beans.factory.annotation.Autowired; +import static ca.uhn.fhir.cr.common.CanonicalHelper.getCanonicalType; + public class PlanDefinitionPackageProvider { @Autowired - IPlanDefinitionProcessorFactory myR4PlanDefinitionProcessorFactory; + IPlanDefinitionProcessorFactory myPlanDefinitionProcessorFactory; @Operation(name = ProviderConstants.CR_OPERATION_PACKAGE, idempotent = true, type = PlanDefinition.class) public IBaseBundle packagePlanDefinition( @IdParam IdType theId, @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "usePut") String 
theIsPut, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, + @OperationParam(name = "usePut") BooleanType theIsPut, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return myR4PlanDefinitionProcessorFactory + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + return myPlanDefinitionProcessorFactory .create(theRequestDetails) - .packagePlanDefinition(theId, new CanonicalType(theCanonical), null, Boolean.parseBoolean(theIsPut)); + .packagePlanDefinition( + Eithers.for3(canonicalType, theId, null), + theIsPut == null ? Boolean.FALSE : theIsPut.booleanValue()); } @Operation(name = ProviderConstants.CR_OPERATION_PACKAGE, idempotent = true, type = PlanDefinition.class) public IBaseBundle packagePlanDefinition( @OperationParam(name = "id") String theId, @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "usePut") String theIsPut, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, + @OperationParam(name = "usePut") BooleanType theIsPut, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return myR4PlanDefinitionProcessorFactory + IdType id = theId == null ? null : new IdType("PlanDefinition", theId); + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + return myPlanDefinitionProcessorFactory .create(theRequestDetails) .packagePlanDefinition( - new IdType("PlanDefinition", theId), - new CanonicalType(theCanonical), - null, - Boolean.parseBoolean(theIsPut)); + Eithers.for3(canonicalType, id, null), + theIsPut == null ? 
Boolean.FALSE : theIsPut.booleanValue()); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnairePackageProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnairePackageProvider.java index f00d398bc51..94599d0098a 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnairePackageProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnairePackageProvider.java @@ -19,29 +19,36 @@ */ package ca.uhn.fhir.cr.r4.questionnaire; -import ca.uhn.fhir.cr.r4.IQuestionnaireProcessorFactory; +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.common.IQuestionnaireProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import org.hl7.fhir.r4.model.BooleanType; import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.CanonicalType; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.Questionnaire; +import org.opencds.cqf.fhir.utility.monad.Eithers; import org.springframework.beans.factory.annotation.Autowired; +import static ca.uhn.fhir.cr.common.CanonicalHelper.getCanonicalType; + public class QuestionnairePackageProvider { @Autowired - IQuestionnaireProcessorFactory myR4QuestionnaireProcessorFactory; + IQuestionnaireProcessorFactory myQuestionnaireProcessorFactory; /** * Implements a $package operation following the CRMI IG. * * @param theId The id of the Questionnaire. - * @param theCanonical The canonical identifier for the questionnaire (optionally version-specific). - * @Param theIsPut A boolean value to determine if the Bundle returned uses PUT or POST request methods. Defaults to false. 
+ * @param theCanonical The canonical identifier for the Questionnaire (optionally version-specific). + * @param theUrl Canonical URL of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. + * @param theVersion Version of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. + * @param theIsPut A boolean value to determine if the Bundle returned uses PUT or POST request methods. Defaults to false. * @param theRequestDetails The details (such as tenant) of this request. Usually * autopopulated by HAPI. * @return A Bundle containing the Questionnaire and all related Library, CodeSystem and ValueSet resources @@ -50,20 +57,30 @@ public class QuestionnairePackageProvider { public Bundle packageQuestionnaire( @IdParam IdType theId, @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "usePut") String theIsPut, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, + @OperationParam(name = "usePut") BooleanType theIsPut, RequestDetails theRequestDetails) { - return (Bundle) myR4QuestionnaireProcessorFactory + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + return (Bundle) myQuestionnaireProcessorFactory .create(theRequestDetails) - .packageQuestionnaire(theId, new CanonicalType(theCanonical), null, Boolean.parseBoolean(theIsPut)); + .packageQuestionnaire( + Eithers.for3(canonicalType, theId, null), + theIsPut == null ? 
Boolean.FALSE : theIsPut.booleanValue()); } @Operation(name = ProviderConstants.CR_OPERATION_PACKAGE, idempotent = true, type = Questionnaire.class) public Bundle packageQuestionnaire( @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "usePut") String theIsPut, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, + @OperationParam(name = "usePut") BooleanType theIsPut, RequestDetails theRequestDetails) { - return (Bundle) myR4QuestionnaireProcessorFactory + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + return (Bundle) myQuestionnaireProcessorFactory .create(theRequestDetails) - .packageQuestionnaire(null, new CanonicalType(theCanonical), null, Boolean.parseBoolean(theIsPut)); + .packageQuestionnaire( + Eithers.for3(canonicalType, null, null), + theIsPut == null ? Boolean.FALSE : theIsPut.booleanValue()); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnairePopulateProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnairePopulateProvider.java index 0108c050fa3..4346fb25a46 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnairePopulateProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnairePopulateProvider.java @@ -20,7 +20,8 @@ package ca.uhn.fhir.cr.r4.questionnaire; * #L% */ -import ca.uhn.fhir.cr.r4.IQuestionnaireProcessorFactory; +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.common.IQuestionnaireProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; @@ -28,6 +29,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; 
import org.hl7.fhir.exceptions.FHIRException; +import org.hl7.fhir.r4.model.BooleanType; import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.CanonicalType; import org.hl7.fhir.r4.model.Endpoint; @@ -35,11 +37,14 @@ import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.Questionnaire; import org.hl7.fhir.r4.model.QuestionnaireResponse; +import org.opencds.cqf.fhir.utility.monad.Eithers; import org.springframework.beans.factory.annotation.Autowired; +import static ca.uhn.fhir.cr.common.CanonicalHelper.getCanonicalType; + public class QuestionnairePopulateProvider { @Autowired - IQuestionnaireProcessorFactory myR4QuestionnaireProcessorFactory; + IQuestionnaireProcessorFactory myQuestionnaireProcessorFactory; /** * Implements a modified version of the Structured Data Capture (SDC) IG. * * @param theId The id of the Questionnaire to populate. - * @param theCanonical The canonical identifier for the questionnaire (optionally version-specific). * @param theQuestionnaire The Questionnaire to populate. Used when the operation is invoked at the 'type' level. + * @param theCanonical The canonical identifier for the questionnaire (optionally version-specific). + * @param theUrl Canonical URL of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. + * @param theVersion Version of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. * @param theSubject The subject(s) that is/are the target of the Questionnaire. - * @param theParameters Any input parameters defined in libraries referenced by the Questionnaire. - * @param theBundle Data to be made available during CQL evaluation. + * @param theUseServerData Whether to use data from the server performing the evaluation. + * @param theData Data to be made available during CQL evaluation. 
+ * @param theBundle Legacy support for data parameter. * @param theDataEndpoint An endpoint to use to access data referenced by retrieve operations in libraries * referenced by the Questionnaire. * @param theContentEndpoint An endpoint to use to access content (i.e. libraries) referenced by the Questionnaire. @@ -141,25 +163,30 @@ public class QuestionnairePopulateProvider { @Operation(name = ProviderConstants.CR_OPERATION_POPULATE, idempotent = true, type = Questionnaire.class) public QuestionnaireResponse populate( @IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, @OperationParam(name = "subject") String theSubject, @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "useServerData") BooleanType theUseServerData, + @OperationParam(name = "data") Bundle theData, @OperationParam(name = "bundle") Bundle theBundle, @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return (QuestionnaireResponse) myR4QuestionnaireProcessorFactory + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + Bundle data = theData == null ? theBundle : theData; + return (QuestionnaireResponse) myQuestionnaireProcessorFactory .create(theRequestDetails) .populate( - theId, - new CanonicalType(theCanonical), - theQuestionnaire, + Eithers.for3(canonicalType, theId, theQuestionnaire), theSubject, theParameters, - theBundle, + data, + theUseServerData == null ? 
Boolean.TRUE : theUseServerData.booleanValue(), theDataEndpoint, theContentEndpoint, theTerminologyEndpoint); @@ -167,25 +194,30 @@ public class QuestionnairePopulateProvider { @Operation(name = ProviderConstants.CR_OPERATION_POPULATE, idempotent = true, type = Questionnaire.class) public QuestionnaireResponse populate( - @OperationParam(name = "canonical") String theCanonical, @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, @OperationParam(name = "subject") String theSubject, @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "useServerData") BooleanType theUseServerData, + @OperationParam(name = "data") Bundle theData, @OperationParam(name = "bundle") Bundle theBundle, @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return (QuestionnaireResponse) myR4QuestionnaireProcessorFactory + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + Bundle data = theData == null ? theBundle : theData; + return (QuestionnaireResponse) myQuestionnaireProcessorFactory .create(theRequestDetails) .populate( - null, - new CanonicalType(theCanonical), - theQuestionnaire, + Eithers.for3(canonicalType, null, theQuestionnaire), theSubject, theParameters, - theBundle, + data, + theUseServerData == null ? 
Boolean.TRUE : theUseServerData.booleanValue(), theDataEndpoint, theContentEndpoint, theTerminologyEndpoint); diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaireresponse/QuestionnaireResponseExtractProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaireresponse/QuestionnaireResponseExtractProvider.java index f7598e66487..65242192998 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaireresponse/QuestionnaireResponseExtractProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaireresponse/QuestionnaireResponseExtractProvider.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.cr.r4.questionnaireresponse; * #L% */ -import ca.uhn.fhir.cr.r4.IQuestionnaireResponseProcessorFactory; +import ca.uhn.fhir.cr.common.IQuestionnaireResponseProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; @@ -29,22 +29,26 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.instance.model.api.IBaseBundle; +import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.QuestionnaireResponse; +import org.opencds.cqf.fhir.utility.monad.Eithers; import org.springframework.beans.factory.annotation.Autowired; public class QuestionnaireResponseExtractProvider { @Autowired - IQuestionnaireResponseProcessorFactory myR4QuestionnaireResponseProcessorFactory; + IQuestionnaireResponseProcessorFactory myQuestionnaireResponseProcessorFactory; /** - * Implements the $extract + * Implements the Structured Data Capture (SDC) IG. * * @param theId The id of the QuestionnaireResponse to extract data from. * @param theQuestionnaireResponse The QuestionnaireResponse to extract data from. 
Used when the operation is invoked at the 'type' level. + * @param theParameters Any input parameters defined in libraries referenced by the Questionnaire. + * @param theData Data to be made available during CQL evaluation. * @param theRequestDetails The details (such as tenant) of this request. Usually * autopopulated HAPI. * @return The resulting FHIR resource produced after extracting data. This will either be a single resource or a Transaction Bundle that contains multiple resources. @@ -53,20 +57,24 @@ public class QuestionnaireResponseExtractProvider { public IBaseBundle extract( @IdParam IdType theId, @OperationParam(name = "questionnaire-response") QuestionnaireResponse theQuestionnaireResponse, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "data") Bundle theData, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return myR4QuestionnaireResponseProcessorFactory + return myQuestionnaireResponseProcessorFactory .create(theRequestDetails) - .extract(theId, theQuestionnaireResponse, null, null, null); + .extract(Eithers.for2(theId, theQuestionnaireResponse), theParameters, theData); } @Operation(name = ProviderConstants.CR_OPERATION_EXTRACT, idempotent = true, type = QuestionnaireResponse.class) public IBaseBundle extract( @OperationParam(name = "questionnaire-response") QuestionnaireResponse theQuestionnaireResponse, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "data") Bundle theData, RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { - return myR4QuestionnaireResponseProcessorFactory + return myQuestionnaireResponseProcessorFactory .create(theRequestDetails) - .extract(null, theQuestionnaireResponse, null, null, null); + .extract(Eithers.for2(null, theQuestionnaireResponse), theParameters, theData); } } diff --git 
a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/structuredefinition/StructureDefinitionQuestionnaireProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/structuredefinition/StructureDefinitionQuestionnaireProvider.java new file mode 100644 index 00000000000..f125db8828f --- /dev/null +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/structuredefinition/StructureDefinitionQuestionnaireProvider.java @@ -0,0 +1,137 @@ +/*- + * #%L + * HAPI FHIR - Clinical Reasoning + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.cr.r4.structuredefinition; + +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.cr.common.IQuestionnaireProcessorFactory; +import ca.uhn.fhir.rest.annotation.IdParam; +import ca.uhn.fhir.rest.annotation.Operation; +import ca.uhn.fhir.rest.annotation.OperationParam; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import org.hl7.fhir.r4.model.BooleanType; +import org.hl7.fhir.r4.model.Bundle; +import org.hl7.fhir.r4.model.CanonicalType; +import org.hl7.fhir.r4.model.Endpoint; +import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.Parameters; +import org.hl7.fhir.r4.model.Questionnaire; +import org.hl7.fhir.r4.model.StructureDefinition; +import org.opencds.cqf.fhir.utility.monad.Eithers; +import org.springframework.beans.factory.annotation.Autowired; + +import static ca.uhn.fhir.cr.common.CanonicalHelper.getCanonicalType; + +public class StructureDefinitionQuestionnaireProvider { + @Autowired + IQuestionnaireProcessorFactory myQuestionnaireProcessorFactory; + + /** + * Implements the $questionnaire + * operation. + * + * @param theId The id of the StructureDefinition. + * @param theProfile The StructureDefinition to base the Questionnaire on. Used when the operation is invoked at the 'type' level. + * @param theCanonical The canonical identifier for the StructureDefinition (optionally version-specific). + * @param theUrl Canonical URL of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. + * @param theVersion Version of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. + * @param theSupportedOnly If true (default: false), the questionnaire will only include those elements marked as "mustSupport='true'" in the StructureDefinition. 
+ * @param theRequiredOnly If true (default: false), the questionnaire will only include those elements marked as "min>0" in the StructureDefinition. + * @param theSubject The subject(s) that is/are the target of the Questionnaire. + * @param theParameters Any input parameters defined in libraries referenced by the StructureDefinition. + * @param theUseServerData Whether to use data from the server performing the evaluation. + * @param theData Data to be made available during CQL evaluation. + * @param theDataEndpoint An endpoint to use to access data referenced by retrieve operations in libraries + * referenced by the StructureDefinition. + * @param theContentEndpoint An endpoint to use to access content (i.e. libraries) referenced by the StructureDefinition. + * @param theTerminologyEndpoint An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) + * referenced by the StructureDefinition. + * @param theRequestDetails The details (such as tenant) of this request. Usually + * autopopulated HAPI. + * @return The questionnaire form generated based on the StructureDefinition. 
+ */ + @Operation(name = ProviderConstants.CR_OPERATION_QUESTIONNAIRE, idempotent = true, type = StructureDefinition.class) + public Questionnaire questionnaire( + @IdParam IdType theId, + @OperationParam(name = "profile") StructureDefinition theProfile, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, + @OperationParam(name = "supportedOnly") BooleanType theSupportedOnly, + @OperationParam(name = "requiredOnly") BooleanType theRequiredOnly, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "useServerData") BooleanType theUseServerData, + @OperationParam(name = "data") Bundle theData, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) { + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + return (Questionnaire) myQuestionnaireProcessorFactory + .create(theRequestDetails) + .generateQuestionnaire( + Eithers.for3(canonicalType, theId, theProfile), + theSupportedOnly == null ? Boolean.FALSE : theSupportedOnly.booleanValue(), + theRequiredOnly == null ? Boolean.FALSE : theRequiredOnly.booleanValue(), + theSubject, + theParameters, + theData, + theUseServerData == null ? 
Boolean.TRUE : theUseServerData.booleanValue(), + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint, + null); + } + + @Operation(name = ProviderConstants.CR_OPERATION_QUESTIONNAIRE, idempotent = true, type = StructureDefinition.class) + public Questionnaire questionnaire( + @OperationParam(name = "profile") StructureDefinition theProfile, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "url") String theUrl, + @OperationParam(name = "version") String theVersion, + @OperationParam(name = "supportedOnly") BooleanType theSupportedOnly, + @OperationParam(name = "requiredOnly") BooleanType theRequiredOnly, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "useServerData") BooleanType theUseServerData, + @OperationParam(name = "data") Bundle theData, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) { + CanonicalType canonicalType = getCanonicalType(FhirVersionEnum.R4, theCanonical, theUrl, theVersion); + return (Questionnaire) myQuestionnaireProcessorFactory + .create(theRequestDetails) + .generateQuestionnaire( + Eithers.for3(canonicalType, null, theProfile), + theSupportedOnly == null ? Boolean.FALSE : theSupportedOnly.booleanValue(), + theRequiredOnly == null ? Boolean.FALSE : theRequiredOnly.booleanValue(), + theSubject, + theParameters, + theData, + theUseServerData == null ? 
Boolean.TRUE : theUseServerData.booleanValue(), + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint, + null); + } +} diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/PlanDefinitionOperationsProviderTest.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/PlanDefinitionOperationsProviderTest.java index ef53d8acc57..7beb1e5a208 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/PlanDefinitionOperationsProviderTest.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/PlanDefinitionOperationsProviderTest.java @@ -19,10 +19,11 @@ public class PlanDefinitionOperationsProviderTest extends BaseCrR4TestServer { loadBundle("ca/uhn/fhir/cr/r4/Bundle-PatientData.json"); var requestDetails = setupRequestDetails(); - var planDefinitionID = new IdType(Enumerations.FHIRAllTypes.PLANDEFINITION.toCode(), "ASLPA1"); + var url = "http://example.org/sdh/dtr/aslp/PlanDefinition/ASLPA1"; + var version = "1.0.0"; var patientID = "positive"; var parameters = new Parameters().addParameter("Service Request Id", "SleepStudy").addParameter("Service Request Id", "SleepStudy2"); - var result = (CarePlan) myPlanDefinitionApplyProvider.apply(planDefinitionID, null, null, patientID, + var result = (CarePlan) myPlanDefinitionApplyProvider.apply(null, null, null, url, version, patientID, null, null, null, null, null, null, null, null, parameters, new BooleanType(true), null, null, null, null, @@ -35,7 +36,7 @@ public class PlanDefinitionOperationsProviderTest extends BaseCrR4TestServer { .getItem().get(0) .getText()); - var resultR5 = (Bundle) myPlanDefinitionApplyProvider.applyR5(planDefinitionID, null, null, patientID, + var resultR5 = (Bundle) myPlanDefinitionApplyProvider.applyR5(null, null, null, url, version, patientID, null, null, null, null, null, null, null, null, parameters, new BooleanType(true), null, null, null, null, diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/QuestionnaireOperationsProviderTest.java 
b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/QuestionnaireOperationsProviderTest.java index 353539f91d6..f14507d6a52 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/QuestionnaireOperationsProviderTest.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/QuestionnaireOperationsProviderTest.java @@ -3,6 +3,7 @@ package ca.uhn.fhir.cr.r4; import ca.uhn.fhir.cr.r4.questionnaire.QuestionnairePackageProvider; import ca.uhn.fhir.cr.r4.questionnaire.QuestionnairePopulateProvider; +import org.hl7.fhir.r4.model.BooleanType; import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.Parameters; @@ -28,7 +29,7 @@ public class QuestionnaireOperationsProviderTest extends BaseCrR4TestServer { var theSubject = "positive"; var parameters = new Parameters().addParameter("Service Request Id", "SleepStudy").addParameter("Service Request Id", "SleepStudy2"); var result = myQuestionnairePopulateProvider.populate(new IdType("Questionnaire", "ASLPA1"), - null, null, theSubject, parameters, + null, null, null, null, theSubject, parameters, null, null, null, null, null, null, requestDetails); @@ -45,7 +46,7 @@ public class QuestionnaireOperationsProviderTest extends BaseCrR4TestServer { var theSubject = "positive"; var parameters = new Parameters().addParameter("Service Request Id", "SleepStudy").addParameter("Service Request Id", "SleepStudy2"); var result = myQuestionnairePopulateProvider.prepopulate(new IdType("Questionnaire", "ASLPA1"), - null, null, theSubject, parameters, + null, null, null, null, theSubject, parameters, null, null, null, null, null, null, requestDetails); @@ -58,7 +59,7 @@ public class QuestionnaireOperationsProviderTest extends BaseCrR4TestServer { loadBundle("ca/uhn/fhir/cr/r4/Bundle-QuestionnairePackage.json"); var requestDetails = setupRequestDetails(); var result = myQuestionnairePackageProvider.packageQuestionnaire(null, - 
"http://example.org/sdh/dtr/aslp/Questionnaire/ASLPA1", "true", + "http://example.org/sdh/dtr/aslp/Questionnaire/ASLPA1", null, null, new BooleanType("true"), requestDetails); assertNotNull(result); diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/QuestionnaireResponseOperationsProviderTest.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/QuestionnaireResponseOperationsProviderTest.java index 334c551127c..0d21df7dd94 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/QuestionnaireResponseOperationsProviderTest.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/QuestionnaireResponseOperationsProviderTest.java @@ -21,7 +21,7 @@ public class QuestionnaireResponseOperationsProviderTest extends BaseCrR4TestSer var requestDetails = setupRequestDetails(); loadResource(Questionnaire.class, "ca/uhn/fhir/cr/r4/Questionnaire-MyPainQuestionnaire.json", requestDetails); var questionnaireResponse = readResource(QuestionnaireResponse.class, "ca/uhn/fhir/cr/r4/QuestionnaireResponse-QRSharonDecision.json"); - var result = (Bundle) myQuestionnaireResponseExtractProvider.extract(null, questionnaireResponse, requestDetails); + var result = (Bundle) myQuestionnaireResponseExtractProvider.extract(null, questionnaireResponse, null, null, requestDetails); assertNotNull(result); assertEquals(5, result.getEntry().size()); diff --git a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java index c6efed8ddee..e142b5e7c7d 100644 --- a/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java +++ b/hapi-fhir-storage-cr/src/test/java/ca/uhn/fhir/cr/r4/TestCrR4Config.java @@ -49,7 +49,6 @@ public class TestCrR4Config { @Bean CareGapsProperties careGapsProperties() { var careGapsProperties = new CareGapsProperties(); - careGapsProperties.setThreadedCareGapsEnabled(false); careGapsProperties.setCareGapsReporter("Organization/alphora"); 
careGapsProperties.setCareGapsCompositionSectionAuthor("Organization/alphora-author"); return careGapsProperties; diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml index d38f71d841c..aaaffd4dfe4 100644 --- a/hapi-fhir-storage-mdm/pom.xml +++ b/hapi-fhir-storage-mdm/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml index ab081e7bd27..97697fbeceb 100644 --- a/hapi-fhir-storage-test-utilities/pom.xml +++ b/hapi-fhir-storage-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml index 804610dd522..96682b6343b 100644 --- a/hapi-fhir-storage/pom.xml +++ b/hapi-fhir-storage/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java index 2c86b83a33c..294b72da983 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java @@ -49,6 +49,10 @@ import java.util.TreeSet; @SuppressWarnings("JavadocLinkAsPlainText") public class JpaStorageSettings extends StorageSettings { + /** + * Default value for {@link #getBulkExportFileMaximumSize()}: 100 MB + */ + public static final long DEFAULT_BULK_EXPORT_MAXIMUM_WORK_CHUNK_SIZE = 100 * FileUtils.ONE_MB; /** * Default value for {@link #setReuseCachedSearchResultsForMillis(Long)}: 60000ms (one minute) */ @@ -313,6 +317,10 @@ public class JpaStorageSettings extends StorageSettings { * Since 6.2.0 */ private int 
myBulkExportFileMaximumCapacity = DEFAULT_BULK_EXPORT_FILE_MAXIMUM_CAPACITY; + /** + * Since 7.2.0 + */ + private long myBulkExportFileMaximumSize = DEFAULT_BULK_EXPORT_MAXIMUM_WORK_CHUNK_SIZE; /** * Since 6.4.0 */ @@ -2301,11 +2309,42 @@ public class JpaStorageSettings extends StorageSettings { * Default is 1000 resources per file. * * @since 6.2.0 + * @see #setBulkExportFileMaximumCapacity(int) */ public void setBulkExportFileMaximumCapacity(int theBulkExportFileMaximumCapacity) { myBulkExportFileMaximumCapacity = theBulkExportFileMaximumCapacity; } + /** + * Defines the maximum size for a single work chunk or report file to be held in + * memory or stored in the database for bulk export jobs. + * Note that the framework will attempt to not exceed this limit, but will only + * estimate the actual chunk size as it works, so this value should be set + * below any hard limits that may be present. + * + * @since 7.2.0 + * @see #DEFAULT_BULK_EXPORT_MAXIMUM_WORK_CHUNK_SIZE The default value for this setting + */ + public long getBulkExportFileMaximumSize() { + return myBulkExportFileMaximumSize; + } + + /** + * Defines the maximum size for a single work chunk or report file to be held in + * memory or stored in the database for bulk export jobs. Default is 100 MB. + * Note that the framework will attempt to not exceed this limit, but will only + * estimate the actual chunk size as it works, so this value should be set + * below any hard limits that may be present. 
+ * + * @since 7.2.0 + * @see #setBulkExportFileMaximumCapacity(int) + * @see #DEFAULT_BULK_EXPORT_MAXIMUM_WORK_CHUNK_SIZE The default value for this setting + */ + public void setBulkExportFileMaximumSize(long theBulkExportFileMaximumSize) { + Validate.isTrue(theBulkExportFileMaximumSize > 0, "theBulkExportFileMaximumSize must be positive"); + myBulkExportFileMaximumSize = theBulkExportFileMaximumSize; + } + /** * If this setting is enabled, then gated batch jobs that produce only one chunk will immediately trigger a batch * maintenance job. This may be useful for testing, but is not recommended for production use. diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/FhirPatch.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/FhirPatch.java index 270a0c002af..3fd0c7ba915 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/FhirPatch.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/FhirPatch.java @@ -30,10 +30,10 @@ import ca.uhn.fhir.util.IModelVisitor2; import ca.uhn.fhir.util.ParametersUtil; import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseEnumeration; -import org.hl7.fhir.instance.model.api.IBaseExtension; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; @@ -49,7 +49,6 @@ import java.util.Optional; import java.util.Set; import static org.apache.commons.lang3.StringUtils.defaultString; -import static org.apache.commons.lang3.StringUtils.isNotBlank; public class FhirPatch { @@ -319,12 +318,9 @@ public class FhirPatch { if (valuePartValue.isPresent()) { newValue = valuePartValue.get(); } else { - newValue = theChildDefinition.getChildElement().newInstance(); + List partParts = 
valuePart.map(this::extractPartsFromPart).orElse(Collections.emptyList()); - if (valuePart.isPresent()) { - IBase theValueElement = valuePart.get(); - populateNewValue(theChildDefinition, newValue, theValueElement); - } + newValue = createAndPopulateNewElement(theChildDefinition, partParts); } if (IBaseEnumeration.class.isAssignableFrom( @@ -350,31 +346,65 @@ public class FhirPatch { return newValue; } - private void populateNewValue(ChildDefinition theChildDefinition, IBase theNewValue, IBase theValueElement) { - List valuePartParts = myContext.newTerser().getValues(theValueElement, "part"); - for (IBase nextValuePartPart : valuePartParts) { + @Nonnull + private List extractPartsFromPart(IBase theParametersParameterComponent) { + return myContext.newTerser().getValues(theParametersParameterComponent, "part"); + } + + /** + * this method will instantiate an element according to the provided Definition and it according to + * the properties found in thePartParts. a part usually represent a datatype as a name/value[X] pair. + * it may also represent a complex type like an Extension. 
+ * + * @param theDefinition wrapper around the runtime definition of the element to be populated + * @param thePartParts list of Part to populate the element that will be created from theDefinition + * @return an element that was created from theDefinition and populated with the parts + */ + private IBase createAndPopulateNewElement(ChildDefinition theDefinition, List thePartParts) { + IBase newElement = theDefinition.getChildElement().newInstance(); + + for (IBase nextValuePartPart : thePartParts) { String name = myContext .newTerser() .getSingleValue(nextValuePartPart, PARAMETER_NAME, IPrimitiveType.class) .map(IPrimitiveType::getValueAsString) .orElse(null); - if (isNotBlank(name)) { - Optional value = - myContext.newTerser().getSingleValue(nextValuePartPart, "value[x]", IBase.class); - if (value.isPresent()) { + if (StringUtils.isBlank(name)) { + continue; + } - BaseRuntimeChildDefinition partChildDef = - theChildDefinition.getChildElement().getChildByName(name); - if (partChildDef == null) { - name = name + "[x]"; - partChildDef = theChildDefinition.getChildElement().getChildByName(name); - } - partChildDef.getMutator().addValue(theNewValue, value.get()); + Optional value = myContext.newTerser().getSingleValue(nextValuePartPart, "value[x]", IBase.class); + + if (value.isPresent()) { + // we have a dataType. let's extract its value and assign it. + BaseRuntimeChildDefinition partChildDef = + theDefinition.getChildElement().getChildByName(name); + if (partChildDef == null) { + name = name + "[x]"; + partChildDef = theDefinition.getChildElement().getChildByName(name); } + partChildDef.getMutator().addValue(newElement, value.get()); + + // a part represent a datatype or a complexType but not both at the same time. + continue; + } + + List part = extractPartsFromPart(nextValuePartPart); + + if (!part.isEmpty()) { + // we have a complexType. let's find its definition and recursively process + // them till all complexTypes are processed. 
+ ChildDefinition childDefinition = findChildDefinition(newElement, name); + + IBase childNewValue = createAndPopulateNewElement(childDefinition, part); + + childDefinition.getChildDef().getMutator().setValue(newElement, childNewValue); } } + + return newElement; } private void deleteSingleElement(IBase theElementToDelete) { @@ -390,17 +420,6 @@ public class FhirPatch { } return true; } - - @Override - public boolean acceptUndeclaredExtension( - IBaseExtension theNextExt, - List theContainingElementPath, - List theChildDefinitionPath, - List> theElementDefinitionPath) { - theNextExt.setUrl(null); - theNextExt.setValue(null); - return true; - } }); } @@ -565,7 +584,7 @@ public class FhirPatch { * If the value is a Resource or a datatype, we can put it into the part.value and that will cover * all of its children. If it's an infrastructure element though, such as Patient.contact we can't * just put it into part.value because it isn't an actual type. So we have to put all of its - * childen in instead. + * children in instead. 
*/ if (valueDef.isStandardType()) { ParametersUtil.addPart(myContext, operation, PARAMETER_VALUE, value); diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index ec487f310cb..541a22ad700 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 32b46056807..0dab5394bcd 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index eb66657106a..fc3ad9f5c7f 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index 9b32e4e252f..04e09044793 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index 37f6865be6b..b4150ec9c38 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/param/TokenParamTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/param/TokenParamTest.java index 64fdf393cca..1dead512c7f 100644 --- 
a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/param/TokenParamTest.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/param/TokenParamTest.java @@ -2,14 +2,13 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; public class TokenParamTest { private static final FhirContext ourCtx = FhirContext.forR4Cached(); @@ -51,19 +50,16 @@ public class TokenParamTest { } @Test - public void testNameNickname() { - StringParam param = new StringParam(); - assertFalse(param.isNicknameExpand()); - param.setValueAsQueryToken(ourCtx, "name", Constants.PARAMQUALIFIER_NICKNAME, "kenny"); - assertTrue(param.isNicknameExpand()); + public void testMdmQualifier() { + final String value = "Patient/PJANE1"; + + TokenParam param = new TokenParam(); + param.setValueAsQueryToken(ourCtx, "_id", Constants.PARAMQUALIFIER_MDM, value); + assertNull(param.getModifier()); + assertNull(param.getSystem()); + assertTrue(param.isMdmExpand()); + assertEquals(value, param.getValue()); } - @Test - public void testGivenNickname() { - StringParam param = new StringParam(); - assertFalse(param.isNicknameExpand()); - param.setValueAsQueryToken(ourCtx, "given", Constants.PARAMQUALIFIER_NICKNAME, "kenny"); - assertTrue(param.isNicknameExpand()); - } } diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/NullMethodOutcomeResourceProviderTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/NullMethodOutcomeResourceProviderTest.java 
new file mode 100644 index 00000000000..3a784d82d00 --- /dev/null +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/NullMethodOutcomeResourceProviderTest.java @@ -0,0 +1,125 @@ +package ca.uhn.fhir.rest.server; + +import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.rest.annotation.Create; +import ca.uhn.fhir.rest.annotation.Delete; +import ca.uhn.fhir.rest.annotation.IdParam; +import ca.uhn.fhir.rest.annotation.Patch; +import ca.uhn.fhir.rest.annotation.ResourceParam; +import ca.uhn.fhir.rest.annotation.Update; +import ca.uhn.fhir.rest.annotation.Validate; +import ca.uhn.fhir.rest.api.MethodOutcome; +import ca.uhn.fhir.rest.api.PatchTypeEnum; +import ca.uhn.fhir.rest.client.api.IGenericClient; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; +import org.apache.http.HttpStatus; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.Parameters; +import org.hl7.fhir.r4.model.Patient; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +public class NullMethodOutcomeResourceProviderTest { + + public static final String TEST_PATIENT_ID = "Patient/123"; + + @RegisterExtension + private RestfulServerExtension myServer = new RestfulServerExtension(FhirVersionEnum.R4) + .registerProvider(new NullMethodOutcomePatientProvider()); + + private IGenericClient myClient; + private Patient myPatient; + + @BeforeEach + public void before() { + myPatient = new Patient(); + myClient = myServer.getFhirClient(); + } + + @Test + public void 
testCreate_withNullMethodOutcome_throwsException() { + try { + myClient.create().resource(myPatient).execute(); + fail(); + } catch (InternalErrorException e){ + assertTrue(e.getMessage().contains("HTTP 500 Server Error: HAPI-0368")); + } + } + + @Test + public void testUpdate_withNullMethodOutcome_returnsHttp200() { + myPatient.setId(TEST_PATIENT_ID); + MethodOutcome outcome = myClient.update().resource(myPatient).execute(); + assertEquals(HttpStatus.SC_OK, outcome.getResponseStatusCode()); + } + + @Test + public void testPatch_withNullMethodOutcome_returnsHttp200() { + MethodOutcome outcome = myClient.patch().withFhirPatch(new Parameters()).withId(TEST_PATIENT_ID).execute(); + assertEquals(HttpStatus.SC_OK, outcome.getResponseStatusCode()); + } + + @Test + public void testValidate_withNullMethodOutcome_throwsException() { + try { + myClient.validate().resource(myPatient).execute(); + fail(); + } catch (ResourceNotFoundException e){ + // This fails with HAPI-0436 because the MethodOutcome of the @Validate method is used + // to build an IBundleProvider with a OperationOutcome resource (which will be null from the provider below). 
+ // See OperationMethodBinding#invokeServer() + assertTrue(e.getMessage().contains("HTTP 404 Not Found: HAPI-0436")); + } + } + + @Test + public void testDelete_withNullMethodOutcome_throwsException() { + try { + myPatient.setId(TEST_PATIENT_ID); + myClient.delete().resource(myPatient).execute(); + fail(); + } catch (InternalErrorException e){ + assertTrue(e.getMessage().contains("HTTP 500 Server Error: HAPI-0368")); + } + } + + public static class NullMethodOutcomePatientProvider implements IResourceProvider { + + @Create + public MethodOutcome create(@ResourceParam Patient thePatient) { + return null; + } + + @Update + public MethodOutcome update(@IdParam IdType theId, @ResourceParam Patient thePatient) { + return null; + } + + @Patch + public MethodOutcome patch(@IdParam IdType theId, @ResourceParam String theBody, PatchTypeEnum thePatchType){ + return null; + } + + @Delete + public MethodOutcome delete(@IdParam IdType theId) { + return null; + } + + @Validate + public MethodOutcome validate(@ResourceParam Patient thePatient) { + return null; + } + + public Class getResourceType() { + return Patient.class; + } + } +} diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/ServerConcurrencyTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/ServerConcurrencyTest.java index 950f625b55a..6c1b9d50600 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/ServerConcurrencyTest.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/ServerConcurrencyTest.java @@ -9,6 +9,11 @@ import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.test.utilities.HttpClientExtension; import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; import com.helger.commons.collection.iterate.EmptyEnumeration; +import jakarta.annotation.Nonnull; +import jakarta.servlet.ReadListener; +import jakarta.servlet.ServletInputStream; +import jakarta.servlet.http.HttpServletRequest; +import 
jakarta.servlet.http.HttpServletResponse; import org.apache.commons.collections4.iterators.IteratorEnumeration; import org.apache.commons.lang3.RandomStringUtils; import org.hl7.fhir.r4.model.IdType; @@ -23,11 +28,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.Assert; -import jakarta.annotation.Nonnull; -import jakarta.servlet.ReadListener; -import jakarta.servlet.ServletInputStream; -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; import java.io.ByteArrayInputStream; import java.io.EOFException; import java.io.IOException; @@ -36,7 +36,9 @@ import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.HashMap; +import java.util.Set; +import static org.apache.commons.collections.CollectionUtils.isEmpty; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doThrow; @@ -109,6 +111,13 @@ public class ServerConcurrencyTest { } return new EmptyEnumeration<>(); }); + when(myRequest.getHeaderNames()).thenAnswer(t -> { + Set headerNames = myHeaders.keySet(); + if (!isEmpty(headerNames)){ + return new IteratorEnumeration<>(headerNames.iterator()); + } + return new EmptyEnumeration<>(); + }); } /** diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java index 20059362e67..ccbb3c60080 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java @@ -41,6 +41,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; import 
static org.hl7.fhir.r4.model.Bundle.HTTPVerb.DELETE; import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.GET; import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.POST; @@ -464,6 +465,96 @@ public class BundleUtilTest { assertThat(searchBundleEntryParts.get(1).getFullUrl(), is(containsString("Condition/"))); assertThat(searchBundleEntryParts.get(1).getResource(), is(notNullValue())); } + @Test + public void testConvertingToSearchBundleEntryPartsRespectsMissingMode() { + + //Given + String bundleString = """ + { + "resourceType": "Bundle", + "id": "bd194b7f-ac1e-429a-a206-ee2c470f23b5", + "type": "searchset", + "total": 1, + "link": [ + { + "relation": "self", + "url": "http://localhost:8000/Condition?_count=1" + } + ], + "entry": [ + { + "fullUrl": "http://localhost:8000/Condition/1626", + "resource": { + "resourceType": "Condition", + "id": "1626", + "identifier": [ + { + "system": "urn:hssc:musc:conditionid", + "value": "1064115000.1.5" + } + ] + } + } + ] + }"""; + Bundle bundle = ourCtx.newJsonParser().parseResource(Bundle.class, bundleString); + + //When + List searchBundleEntryParts = BundleUtil.getSearchBundleEntryParts(ourCtx, bundle); + + //Then + assertThat(searchBundleEntryParts, hasSize(1)); + assertThat(searchBundleEntryParts.get(0).getSearchMode(), is(nullValue())); + assertThat(searchBundleEntryParts.get(0).getFullUrl(), is(containsString("Condition/1626"))); + assertThat(searchBundleEntryParts.get(0).getResource(), is(notNullValue())); + } + + @Test + public void testConvertingToSearchBundleEntryPartsRespectsOutcomeMode() { + + //Given + String bundleString = """ + { + "resourceType": "Bundle", + "id": "bd194b7f-ac1e-429a-a206-ee2c470f23b5", + "type": "searchset", + "total": 1, + "link": [ + { + "relation": "self", + "url": "http://localhost:8000/Condition?_count=1" + } + ], + "entry": [ + { + "fullUrl": "http://localhost:8000/Condition/1626", + "resource": { + "resourceType": "Condition", + "id": "1626", + "identifier": [ + { + "system": 
"urn:hssc:musc:conditionid", + "value": "1064115000.1.5" + } + ] + }, + "search": { + "mode": "outcome" + } + } + ] + }"""; + Bundle bundle = ourCtx.newJsonParser().parseResource(Bundle.class, bundleString); + + //When + List searchBundleEntryParts = BundleUtil.getSearchBundleEntryParts(ourCtx, bundle); + + //Then + assertThat(searchBundleEntryParts, hasSize(1)); + assertThat(searchBundleEntryParts.get(0).getSearchMode(), is(equalTo(BundleEntrySearchModeEnum.OUTCOME))); + assertThat(searchBundleEntryParts.get(0).getFullUrl(), is(containsString("Condition/1626"))); + assertThat(searchBundleEntryParts.get(0).getResource(), is(notNullValue())); + } @Test public void testTransactionSorterReturnsDeletesInCorrectProcessingOrder() { diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml index fb0fdd98b16..d852ad84473 100644 --- a/hapi-fhir-structures-r4b/pom.xml +++ b/hapi-fhir-structures-r4b/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 6a59995b63f..1c88c4f4869 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index a167088c490..ff040fa535c 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index 003ebc562d8..0081f27ff59 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git 
a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index a590bc42c93..be3911ce0ac 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index d01beb852a7..ab34e8d8bcc 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index 37fe068e49c..522ac2b450c 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 9f14436198b..bb7ed613bb1 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4b/pom.xml b/hapi-fhir-validation-resources-r4b/pom.xml index 1547065821b..fb7965abff1 100644 --- a/hapi-fhir-validation-resources-r4b/pom.xml +++ b/hapi-fhir-validation-resources-r4b/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index 8d3945ee89f..11e616c37cd 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ 
b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index bef9c61146d..bd6a099c68c 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptorR4Test.java b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptorR4Test.java index d85cf5e4ee9..372b8ead4a8 100644 --- a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptorR4Test.java +++ b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptorR4Test.java @@ -2589,6 +2589,9 @@ public class AuthorizationInterceptorR4Test extends BaseValidationTestWithInline assertEquals(false, ourHitMethod); } + // This test is of dubious value since it does NOT exercise DAO code. It simply exercises the AuthorizationInterceptor. + // In functional testing or with a more realistic integration test, this scenario, namely having ONLY a FHIR_PATCH + // role, will result in a failure to update the resource. 
@Test public void testPatchAllowed() throws IOException { Observation obs = new Observation(); diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 4bfad09b5f6..4fb7cf0d88e 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index 483bd1ca8d7..5f84df6f169 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index 7a01fefadce..a3004385ff1 100644 --- a/pom.xml +++ b/pom.xml @@ -9,7 +9,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. @@ -989,7 +989,7 @@ 2.2.19 2.0.9 2.19.0 - 6.1.1 + 6.1.5 2023.1.0 4.3.3 3.2.0 @@ -1006,7 +1006,7 @@ 1.0.8 - 3.0.0-PRE17 + 3.2.0 5.4.1 @@ -1990,7 +1990,7 @@ org.postgresql postgresql - 42.7.1 + 42.7.3 com.oracle.database.jdbc diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index 3b0c9d0200d..5f119fcd90b 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index 245d4adf698..4250751cced 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index 8842b7c244f..0fd0f7c3774 100644 --- 
a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.1.6-SNAPSHOT + 7.1.7-SNAPSHOT ../../pom.xml