Merge branch 'master' into 5745-ready-state-batch2

commit 74319a7b6d
@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -29,6 +29,7 @@ import java.util.Map;
 public enum BundleEntrySearchModeEnum {
 	MATCH("match", "http://hl7.org/fhir/search-entry-mode"),
 	INCLUDE("include", "http://hl7.org/fhir/search-entry-mode"),
+	OUTCOME("outcome", "http://hl7.org/fhir/search-entry-mode"),
 	;

 	/**
@@ -79,7 +80,7 @@ public enum BundleEntrySearchModeEnum {
 	/**
 	 * Returns the enumerated value associated with this code
 	 */
-	public BundleEntrySearchModeEnum forCode(String theCode) {
+	public static BundleEntrySearchModeEnum forCode(String theCode) {
 		BundleEntrySearchModeEnum retVal = CODE_TO_ENUM.get(theCode);
 		return retVal;
 	}
@@ -24,6 +24,7 @@ import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.model.base.composite.BaseCodingDt;
 import ca.uhn.fhir.model.base.composite.BaseIdentifierDt;
 import ca.uhn.fhir.model.primitive.UriDt;
+import ca.uhn.fhir.rest.api.Constants;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
@@ -147,18 +148,22 @@ public class TokenParam extends BaseParam /*implements IQueryParameterType*/ {
 	@Override
 	void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theParameter) {
 		setModifier(null);
 		setSystem(null);

 		if (theQualifier != null) {
+			if (Constants.PARAMQUALIFIER_MDM.equals(theQualifier)) {
+				setMdmExpand(true);
+			}
+
 			TokenParamModifier modifier = TokenParamModifier.forValue(theQualifier);
 			setModifier(modifier);

 			if (modifier == TokenParamModifier.TEXT) {
 				setSystem(null);
 				setValue(ParameterUtil.unescape(theParameter));
 				return;
 			}
 		}

 		setSystem(null);
 		if (theParameter == null) {
 			setValue(null);
 		} else {
@@ -30,11 +30,7 @@ public class SearchBundleEntryParts {
 	public SearchBundleEntryParts(String theFullUrl, IBaseResource theResource, String theSearchMode) {
 		myFullUrl = theFullUrl;
 		myResource = theResource;
-		if (BundleEntrySearchModeEnum.INCLUDE.getCode().equalsIgnoreCase(theSearchMode)) {
-			mySearchMode = BundleEntrySearchModeEnum.INCLUDE;
-		} else {
-			mySearchMode = BundleEntrySearchModeEnum.MATCH;
-		}
+		mySearchMode = BundleEntrySearchModeEnum.forCode(theSearchMode);
 	}

 	public String getFullUrl() {
@@ -2,6 +2,7 @@ package ca.uhn.fhir.rest.param;

 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.model.api.IQueryParameterType;
+import ca.uhn.fhir.rest.api.Constants;
 import ch.qos.logback.classic.Level;
 import ch.qos.logback.classic.Logger;
 import ch.qos.logback.classic.spi.ILoggingEvent;
@@ -132,6 +133,23 @@ public class StringParamTest {
 		assertNicknameWarningLogged(false);
 	}

+	@Test
+	public void testNameNickname() {
+		StringParam param = new StringParam();
+		assertFalse(param.isNicknameExpand());
+		param.setValueAsQueryToken(myContext, "name", Constants.PARAMQUALIFIER_NICKNAME, "kenny");
+		assertTrue(param.isNicknameExpand());
+	}
+
+	@Test
+	public void testGivenNickname() {
+		StringParam param = new StringParam();
+		assertFalse(param.isNicknameExpand());
+		param.setValueAsQueryToken(myContext, "given", Constants.PARAMQUALIFIER_NICKNAME, "kenny");
+		assertTrue(param.isNicknameExpand());
+	}
+
+
 	private void assertNicknameQualifierSearchParameterIsValid(StringParam theStringParam, String theExpectedValue){
 		assertTrue(theStringParam.isNicknameExpand());
 		assertFalse(theStringParam.isExact());
@@ -4,7 +4,7 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>7.1.6-SNAPSHOT</version>
+<version>7.1.7-SNAPSHOT</version>
 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>
@@ -12,7 +12,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -6,7 +6,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-cli</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -0,0 +1,6 @@
---
type: fix
issue: 5088
title: "Previously, the fullUrl for resources in _history bundles was not generated correctly when using a
  client-provided id. More recently (after 6.9.10), the same problem started to happen for resources with
  server-generated ids as well. This has now been fixed."
@@ -0,0 +1,6 @@
---
type: fix
issue: 5771
jira: SMILE-7837
title: "Previously, a Patch operation would fail when adding a complex extension, i.e. an extension
  that contains another extension. This issue has been fixed."
@@ -0,0 +1,8 @@
---
type: perf
issue: 5748
title: "In the JPA server, several database columns related to Batch2 jobs and searching
  have been reworked so that they will no longer use LOB datatypes going forward. This
  is a significant advantage on PostgreSQL databases, as it removes heavy use
  of the inefficient `pg_largeobject` table, and it should yield performance boosts for
  MSSQL as well."
@@ -0,0 +1,12 @@
---
type: add
issue: 5750
title: "Update to the 3.2.0 release of the Clinical Reasoning Module. This includes the following changes:
  <ul>
  <li>Updated the Clinical Reasoning documentation.</li>
  <li>Added support for additional parameters on operations.</li>
  <li>Added the StructureDefinition/$questionnaire operation.</li>
  <li>Added the ability to generate PlanDefinition/$apply results with unique ids.</li>
  <li>Resolved issues with Questionnaire item generation during PlanDefinition/$apply.</li>
  <li>Resolved issues with some request resources not being generated correctly from ActivityDefinition/$apply.</li>
  </ul>"
@@ -0,0 +1,4 @@
---
type: fix
issue: 5773
title: "Subscriptions with null content caused NullPointerExceptions. This condition is now checked and handled."
@@ -0,0 +1,4 @@
---
type: add
issue: 5777
title: "Change the implementation of CDS on FHIR to use the Auto Prefetch functionality and to no longer pass the fhirServer from the request into the dataEndpoint parameter of $apply."
@@ -0,0 +1,4 @@
---
type: fix
issue: 5780
title: "SearchBundleEntryParts now correctly respects the `OUTCOME` and `null` search modes in a bundle entry. In the public space, this means `BundleUtil#getSearchBundleEntryParts()` no longer incorrectly infers information about the entry mode."
@@ -0,0 +1,4 @@
---
type: add
issue: 5784
title: "Add support to _sort for chained `composition` Bundle SearchParameters."
@@ -0,0 +1,6 @@
---
type: fix
issue: 5788
title: "Previously, the casing of the X-Request-ID header key was not retained in the corresponding response.
  This has been fixed."
@@ -0,0 +1,6 @@
---
type: add
issue: 5800
title: "A new setting in JpaStorageSettings enforces a maximum file size for Bulk Export
  output files, as well as for work chunks created during processing. This setting has
  a default value of 100 MB."
@@ -0,0 +1,5 @@
---
type: fix
issue: 5802
title: "Previously, using the ':mdm' qualifier with the '_id' search parameter would not include expanded resources in
  the search results. This issue has been fixed."
@@ -0,0 +1,143 @@
# CDS Hooks

CDS Hooks are services called by CDS Clients (typically Electronic Health Record Systems (EHRs) or other health
information systems). They implement a "hook"-based pattern for invoking decision support from within a clinician's
workflow.

HAPI FHIR implements [Version 1.1 of the CDS Hooks Specification](https://cds-hooks.hl7.org/ballots/2020Sep/).

The HAPI FHIR CDS Hooks Module simplifies the effort of creating CDS Hooks. All you need to do is create a method that
accepts a `CdsServiceRequestJson` parameter and returns a `CdsServiceResponseJson` value, and annotate this method with
the `@CdsService` annotation. This annotation and the JSON classes and all their subcomponents are available in the
open-source project `hapi-fhir-server-cds-hooks`. Any FHIR resources in requests and responses are automatically serialized
into HAPI FHIR resource instances for you, so they are easy to work with within your code.
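
To make that contract concrete, here is a minimal sketch of a service class. The class name and service id are placeholders, the package paths in the imports are assumptions about where `hapi-fhir-server-cds-hooks` keeps these types, and the real `@CdsService` annotation may require further attributes (such as a hook binding):

```java
// Sketch only: package locations below are assumed, not verified.
import ca.uhn.hapi.fhir.cdshooks.api.CdsService;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceRequestJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseJson;

public class GreetingCdsService {

	// Exposed at a path like: POST [base]/cds-services/example-greeting
	@CdsService(value = "example-greeting")
	public CdsServiceResponseJson greet(CdsServiceRequestJson theRequest) {
		CdsServiceResponseJson response = new CdsServiceResponseJson();
		// Inspect theRequest (hook context, prefetch data, fhirAuthorization)
		// and add decision-support cards to the response here.
		return response;
	}
}
```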

In addition to simplifying the effort of building CDS Hooks, the HAPI FHIR CDS Hooks module also provides the following:

* All access is logged in the HAPI FHIR Audit Trail.
* Authorization is controlled by the HAPI FHIR security framework.
* Management and monitoring capabilities are provided by the HAPI FHIR platform.
* A [CDS on FHIR](/docs/cds_hooks/#cds-on-fhir) implementation that auto-generates CDS Services from PlanDefinitions and executes them via the $apply operation.

# Auto Prefetch

The HAPI FHIR CDS Hooks module provides two powerful Auto-Prefetch features:

1. If `allowAutoFhirClientPrefetch` is set to `true` in the `@CdsService` annotation on your CDS Service method, then
   before calling your method, HAPI FHIR will compare the prefetch elements declared by your service method in
   the `@CdsService` annotation to the prefetch elements included within the `CdsServiceRequestJson` REST request. If
   it detects that any are missing, HAPI FHIR will use the FHIR endpoint authorization details included within
   the `fhirAuthorization` element in the request to automatically add them to the prefetch before calling your method.
2. Even simpler, if your HAPI FHIR server has a FHIR Storage module, you can optionally add a dependency from your
   CDS Hooks Module on your FHIR Storage module. If you do this, then when HAPI FHIR detects any required prefetch
   elements missing in a request, it will automatically fetch the missing data from your storage module before calling
   your CDS Hooks method. Note that in this case, the same credentials used to call the CDS Hooks endpoint are used to
   authorize access to the FHIR Storage module.

## CDS Hooks Auto Prefetch Rules

- If there are no missing prefetch elements, the CDS Hooks service method is called directly with the request. (Note
  that per the CDS Hooks specification, a value of `null` is not considered missing; CDS Hooks clients set a
  prefetch value to `null` to indicate that the prefetch data is known not to exist.)
- Otherwise, if a `fhirServer` is included in the request:
  - If the `@CdsService` annotation on the service method has `allowAutoFhirClientPrefetch = true`, then HAPI FHIR will
    perform a FHIR REST call to that `fhirServer` endpoint to fetch the missing data.
  - Otherwise, the CDS Hooks service method is expected to call the `fhirServer` endpoint itself to retrieve the
    missing data.
- Otherwise, if the CDS Hooks Module declares a dependency on a FHIR Storage Module, then HAPI FHIR will fetch the
  missing data from that FHIR Storage Module.
- Otherwise, the method will fail with HTTP 412 PRECONDITION FAILED (per the CDS Hooks specification).
- The Auto-Prefetch rules can be overridden for individual elements by setting a `source` for the `@CdsServicePrefetch`.
  HAPI FHIR will attempt to use the `source` strategy for the query instead of following the order above; a sketch of a
  prefetch declaration follows this list.
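
As an illustration of the rules above, a service might declare a prefetch element and opt into auto prefetch as sketched below. Only `value`, `allowAutoFhirClientPrefetch`, `@CdsServicePrefetch`, and its `source` attribute are named in this documentation; the `hook` and `query` attribute names here are assumptions:

```java
public class PatientViewCdsService {

	// Sketch only: attribute names other than "value" and
	// "allowAutoFhirClientPrefetch" are assumptions about the annotation API.
	@CdsService(
			value = "patient-view-example",
			hook = "patient-view",
			allowAutoFhirClientPrefetch = true,
			prefetch = {
				// Declares the "patient" prefetch element. If the CDS Client
				// omits it, HAPI FHIR resolves it per the rules above.
				@CdsServicePrefetch(value = "patient", query = "Patient/{{context.patientId}}")
			})
	public CdsServiceResponseJson patientView(CdsServiceRequestJson theRequest) {
		// "patient" is guaranteed to be present in the prefetch by the time this runs.
		return new CdsServiceResponseJson();
	}
}
```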

# Architecture

The diagram below shows how CDS Hooks work. The box in grey contains *customer code*, which is code that you write.

<img src="/docs/images/cds_hooks.svg" alt="CDS Hooks Architecture" style="width: 1120px;"/>

A CDS Hooks implementation is packaged as a Java JAR file that contains several key components:

* **CDS Service** classes, which implement CDS Hooks *service* and *feedback* methods.
* A **Spring Context Config** class, which is a Spring Framework class used to instantiate and configure the CDS Hooks
  classes.

# CDS Hooks Classes

A CDS Hooks class contains annotated *service* and *feedback* methods. One CDS Hooks class can contain any number of
these methods. A CDS Hooks *service* method is annotated with the `@CdsService` annotation, and a CDS Hooks *feedback*
method is annotated with the `@CdsServiceFeedback` annotation. The value of each annotation corresponds to the id of
the CDS Hooks service. For example:

A method annotated with `@CdsService(value="example-service")` is accessed at a path
like `https://example.com:8888/cds-services/example-service`.

A method annotated with `@CdsServiceFeedback(value="my-service")` is accessed at a path
like `https://example.com:8888/cds-services/my-service/feedback`.

A very basic example is shown below:

```java
{{snippet:file:hapi-fhir-server-cds-hooks/src/test/java/ca.uhn.hapi.fhir.cdshooks/controller/ExampleCdsService.java}}
```

Both of these example methods accept a single JSON instance parameter (`CdsServiceRequestJson`
and `CdsServiceFeedbackJson` respectively). Alternatively, these methods can accept a single String parameter, in which
case the CDS Hooks module will string-encode the instance before calling the method.

# The Spring Context Config Class

This mandatory class is a [Spring Framework](https://springframework.org) annotation-based application context config
class. It is characterized by having the `@Configuration` annotation on the class itself, as well as having one or more
non-static factory methods annotated with the `@Bean` annotation, which create instances of your providers (as well as
creating any other utility classes you might need, such as database pools, HTTP clients, etc.).

This class must instantiate a bean named `cdsServices`:

* The `cdsServices` bean method should return a `List<Object>` of classes that contain `@CdsService`
  and/or `@CdsServiceFeedback` annotated methods.

The following example shows a Spring Context Config class that registers the CDS Hooks example above.

```java
@Configuration
public class TestServerAppCtx {

	/**
	 * This bean is a list of CDS Hooks classes, each one
	 * of which implements one or more CDS Hooks services.
	 */
	@Bean(name = "cdsServices")
	public List<Object> cdsServices(){
		List<Object> retVal = new ArrayList<>();
		retVal.add(new ExampleCdsService());
		// add other CDS Hooks classes...
		return retVal;
	}
}
```

# Calling CDS Hooks

Per [Version 1.1 of the CDS Hooks Specification](https://cds-hooks.hl7.org/ballots/2020Sep/), a list of all registered
services is available at a path like `https://example.com:8888/cds-services`. As a convenience, Swagger REST
documentation is provided at the root of the endpoint: `https://example.com:8888/`.

# Example Project

A sample CDS Hooks project is available at the following links:

* [cdr-endpoint-cds-hooks-demoproject-1.0.zip](/docs/downloads/cdr-endpoint-cds-hooks-demoproject-1.0.zip)
* [cdr-endpoint-cds-hooks-demoproject-1.0.tar.gz](/docs/downloads/cdr-endpoint-cds-hooks-demoproject-1.0.tar.gz)

# CDS on FHIR

To create CDS Services from PlanDefinitions, dependencies on a FHIR Storage Module, a FHIR Endpoint, and a CQL module must be set. This creates a listener on the storage module, so that any changes to PlanDefinition resources will update the CDS Service cache.

Any PlanDefinition resource with an action that has a trigger of type [named-event](http://hl7.org/fhir/R4/codesystem-trigger-type.html#trigger-type-named-event) will have a CDS Service created, using the PlanDefinition.id as the service id and the name of the trigger as the hook the service is created for, per the [CDS on FHIR Specification](https://hl7.org/fhir/clinicalreasoning-cds-on-fhir.html#surfacing-clinical-decision-support).

CDS Services created this way show up as registered services and can be called just as other services are called. The CDS Service request is converted into parameters for the [$apply operation](/docs/clinical_reasoning/plan_definitions.html#apply), the results of which are then converted into a CDS Response per the [CDS on FHIR Specification](https://hl7.org/fhir/clinicalreasoning-cds-on-fhir.html#consuming-decision-support).

These CDS Services take advantage of the [Auto Prefetch](/docs/cds_hooks/#auto-prefetch) feature. Prefetch data is included as a Bundle in the `data` parameter of the $apply call.

The $apply operation runs against the FHIR Storage Module, so it also has access to any data stored there. Any CQL evaluation during the $apply operation that results in a retrieve will always pull from both the Bundle and the FHIR Storage Module, regardless of what data is passed into the prefetch of the service request.
@@ -0,0 +1,48 @@
# ActivityDefinition

## Introduction

The FHIR Clinical Reasoning Module defines the [ActivityDefinition resource](https://www.hl7.org/fhir/activitydefinition.html) and several [associated operations](https://www.hl7.org/fhir/activitydefinition-operations.html). An ActivityDefinition is a shareable, consumable description of some activity to be performed. It may be used to specify actions to be taken as part of a workflow, order set, or protocol, or it may be used independently as part of a catalog of activities such as orderables.

In general, an activity definition is simply a conceptual description of some specific action that should be taken. An instance of an ActivityDefinition does not indicate that any action has been performed (as an event resource does), nor does it indicate the actual intent to carry out any particular action (as a request resource does). Instead, an activity definition provides a reusable template that can be used to construct specific request resources such as ServiceRequest and MedicationRequest.

Note that this is conceptually similar to the Task resource as well, with the distinction being that ActivityDefinition represents the description of a task in the abstract, while the Task resource is used to track a specific instance of a task as it moves through the steps of a workflow.

An ActivityDefinition resource provides a description, or template, of an action to perform. These actions can be purely text-based descriptions of the action to be performed, interpretable only by a human user, or they can be structured definitions with enough information to construct a resource to represent the request or activity directly. This process of converting the ActivityDefinition into a specific resource in a particular context is performed with the [$apply](/docs/clinical_reasoning/activity_definitions.html#apply) operation.

## Operations

HAPI implements the following operations for ActivityDefinitions:

* [$apply](/docs/clinical_reasoning/activity_definitions.html#apply)

## Apply

The `ActivityDefinition/$apply` [operation](https://www.hl7.org/fhir/activitydefinition-operation-apply.html) creates a [Request Resource](https://www.hl7.org/fhir/workflow.html#request) for a given context. This implementation follows the [FHIR Specification](https://www.hl7.org/fhir/activitydefinition.html#12.22.4.3) and supports the [FHIR Clinical Guidelines IG](http://hl7.org/fhir/uv/cpg/index.html).

### Parameters

The following parameters are supported for the `ActivityDefinition/$apply` operation:

| Parameter | Type | Description |
|---------------------|---------------------------|-------------|
| activityDefinition | ActivityDefinition | The activity definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, either this parameter or a url (and optionally a version) must be supplied. |
| canonical | canonical(ActivityDefinition) | The canonical url of the activity definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, either this parameter (optionally with a version) or the activityDefinition parameter must be supplied. |
| url | uri | Canonical URL of the ActivityDefinition when invoked at the resource type level. This is exclusive with the activityDefinition and canonical parameters. |
| version | string | Version of the ActivityDefinition when invoked at the resource type level. This is exclusive with the activityDefinition and canonical parameters. |
| subject | string(reference) | The subject(s) that is/are the target of the activity definition to be applied. |
| encounter | string(reference) | The encounter in context, if any. |
| practitioner | string(reference) | The practitioner applying the activity definition. |
| organization | string(reference) | The organization applying the activity definition. |
| userType | CodeableConcept | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.). |
| userLanguage | CodeableConcept | Preferred language of the person using the system. |
| userTaskContext | CodeableConcept | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources. |
| setting | CodeableConcept | The current setting of the request (inpatient, outpatient, etc.). |
| settingContext | CodeableConcept | Additional detail about the setting of the request, if any. |
| parameters | Parameters | Any input parameters defined in libraries referenced by the ActivityDefinition. |
| useServerData | boolean | Whether to use data from the server performing the evaluation. If this parameter is true (the default), the operation will use data first from any bundles provided as parameters (through the data and prefetch parameters), second from the server performing the operation, and third from the dataEndpoint parameter (if provided). If this parameter is false, the operation will use data first from the bundles provided in the data or prefetch parameters, and second from the dataEndpoint parameter (if provided). |
| data | Bundle | Data to be made available to the ActivityDefinition evaluation. |
| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the ActivityDefinition. |
| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the ActivityDefinition. |
| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the ActivityDefinition. |
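
As a usage illustration (not part of this specification), the operation can be invoked with the HAPI FHIR generic client. This is a sketch assuming an R4 server; the base URL, ActivityDefinition id, and choice of input parameters are placeholders:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class ActivityDefinitionApplyExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://your-server-base/fhir");

		// Supply operation inputs from the table above; only "subject" is set here.
		Parameters inParams = new Parameters();
		inParams.addParameter().setName("subject").setValue(new StringType("Patient/123"));

		// Instance-level invocation against a placeholder ActivityDefinition id.
		// The constructed request resource comes back wrapped per the client's
		// operation-response handling.
		Parameters outParams = client.operation()
				.onInstance(new IdType("ActivityDefinition", "example"))
				.named("$apply")
				.withParameters(inParams)
				.execute();

		System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outParams));
	}
}
```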
@@ -18,6 +18,7 @@ There are additional IGs outside the FHIR CR module that define further requirem
 * [Structured Data Capture IG](https://build.fhir.org/ig/HL7/sdc/)
 * [Clinical Guidelines IG](https://hl7.org/fhir/uv/cpg/)
 * [Quality Measures IG](http://hl7.org/fhir/us/cqfmeasures/)
+* [Canonical Resource Management Infrastructure IG](https://build.fhir.org/ig/HL7/crmi-ig/index.html)

 ## HAPI FHIR
@@ -13,6 +13,75 @@ The process of applying a PlanDefinition to a particular context typically produ
Each ActivityDefinition is used to construct a specific resource, based on the definition of the activity, combined with contextual information for the particular patient that the plan definition is being applied to.

## Operations

HAPI implements the following operations for PlanDefinitions:

* [$apply](/docs/clinical_reasoning/plan_definitions.html#apply)
* [$package](/docs/clinical_reasoning/plan_definitions.html#package)

## Apply

The `PlanDefinition/$apply` [operation](https://www.hl7.org/fhir/plandefinition-operation-apply.html) applies a PlanDefinition to a given context. This implementation follows the [FHIR Specification](https://www.hl7.org/fhir/plandefinition.html#12.23.4.3) and supports the [FHIR Clinical Guidelines IG](http://hl7.org/fhir/uv/cpg/index.html). In addition, an R5 version of apply is made available for R4 instances; it causes $apply to return a Bundle of resources instead of a CarePlan, and can be invoked with `$r5.apply`.

Some example PlanDefinition workflows are available in the [opioid-cds-r4](https://github.com/cqframework/opioid-cds-r4) IG. Full Bundles with all the required supporting resources are available [here](https://github.com/cqframework/opioid-cds-r4/tree/1e543f781138f3d85404b7f65a92ff713519ef2c/bundles). You can download a Bundle and load it on your server as a transaction:

```bash
POST http://your-server-base/fhir opioidcds-10-patient-view-bundle.json
```

These Bundles do not include example Patient clinical data. Applying a PlanDefinition can be invoked with:

```bash
GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$apply?subject=Patient/patientId&encounter=Encounter/encounterId&practitioner=Practitioner/practitionerId
```

### Parameters

The following parameters are supported for the `PlanDefinition/$apply` and `PlanDefinition/$r5.apply` operations:

| Parameter | Type | Description |
|---------------------|---------------------------|-------------|
| planDefinition | PlanDefinition | The plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, either this parameter or a url (and optionally a version) must be supplied. |
| canonical | canonical(PlanDefinition) | The canonical url of the plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, either this parameter (optionally with a version) or the planDefinition parameter must be supplied. |
| url | uri | Canonical URL of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. |
| version | string | Version of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. |
| subject | string(reference) | The subject(s) that is/are the target of the plan definition to be applied. |
| encounter | string(reference) | The encounter in context, if any. |
| practitioner | string(reference) | The practitioner applying the plan definition. |
| organization | string(reference) | The organization applying the plan definition. |
| userType | CodeableConcept | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.). |
| userLanguage | CodeableConcept | Preferred language of the person using the system. |
| userTaskContext | CodeableConcept | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources. |
| setting | CodeableConcept | The current setting of the request (inpatient, outpatient, etc.). |
| settingContext | CodeableConcept | Additional detail about the setting of the request, if any. |
| parameters | Parameters | Any input parameters defined in libraries referenced by the PlanDefinition. |
| useServerData | boolean | Whether to use data from the server performing the evaluation. If this parameter is true (the default), the operation will use data first from any bundles provided as parameters (through the data and prefetch parameters), second from the server performing the operation, and third from the dataEndpoint parameter (if provided). If this parameter is false, the operation will use data first from the bundles provided in the data or prefetch parameters, and second from the dataEndpoint parameter (if provided). |
| data | Bundle | Data to be made available to the PlanDefinition evaluation. |
| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the PlanDefinition. |
| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the PlanDefinition. |
| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the PlanDefinition. |

## Package

The `PlanDefinition/$package` [operation](https://build.fhir.org/ig/HL7/crmi-ig/OperationDefinition-crmi-package.html) generates a Bundle of resources that includes the PlanDefinition as well as any related resources, which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html).

### Parameters

The following parameters are supported for the `PlanDefinition/$package` operation:

| Parameter | Type | Description |
|-----------|-----------|-------------|
| id | string | The logical id of an existing Resource to package on the server. |
| canonical | canonical | A canonical url (optionally version specific) of a Resource to package on the server. |
| url | uri | A canonical or artifact reference to a Resource to package on the server. This is exclusive with the canonical parameter. |
| version | string | The version of the Resource. This is exclusive with the canonical parameter. |
| usePut | boolean | Determines the HTTP method used in the Bundle entries: POST if false (the default), PUT if true. |
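
A hedged sketch of calling `$package` with the HAPI FHIR generic client follows; the base URL is a placeholder, and the PlanDefinition id reuses the example above:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;

public class PlanDefinitionPackageExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://your-server-base/fhir");

		// Instance-level $package: returns a Bundle containing the PlanDefinition
		// and its related resources.
		Bundle packaged = client.operation()
				.onInstance(new IdType("PlanDefinition", "opioidcds-10-patient-view"))
				.named("$package")
				.withNoParameters(Parameters.class)
				.returnResourceType(Bundle.class)
				.execute();

		System.out.println("Packaged " + packaged.getEntry().size() + " resources");
	}
}
```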

## Example PlanDefinition

```json
{
	"resourceType": "PlanDefinition",
@@ -223,48 +292,3 @@ Each ActivityDefinition is used to construct a specific resource, based on the d
 ]
 }
 ```
-
-## Operations
-
-HAPI implements the [$apply](http://hl7.org/fhir/uv/cpg/OperationDefinition-cpg-plandefinition-apply.html) operation. Support for additional operations is planned.
-
-## Apply
-
-The `$apply` operation applies a PlanDefinition to a given context. This implementation follows the [FHIR Specification](https://www.hl7.org/fhir/plandefinition.html#12.23.4.3) and supports the [FHIR Clinical Guidelines IG](http://hl7.org/fhir/uv/cpg/index.html). In addition, an R5 version of apply is made available for R4 instances. This will cause $apply to return a Bundle of resources instead of a CarePlan. This can be invoked with `$r5.apply`.
-
-### Example PlanDefinition
-
-Some example PlanDefinition workflows are available in the [opioid-cds-r4](https://github.com/cqframework/opioid-cds-r4) IG. Full Bundles with all the required supporting resources are available [here](https://github.com/cqframework/opioid-cds-r4/tree/1e543f781138f3d85404b7f65a92ff713519ef2c/bundles). You can download a Bundle and load it on your server as a transaction:
-
-```bash
-POST http://your-server-base/fhir opioidcds-10-patient-view-bundle.json
-```
-
-These Bundles do not include example Patient clinical data. Applying a PlanDefinition can be invoked with:
-
-```bash
-GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$apply?subject=Patient/patientId&encounter=Encounter/encounterId&practitioner=Practitioner/practitionerId
-```
-
-### Additional Parameters
-
-The following additional parameters are supported for the `$apply` and `$r5.apply` operation:
-
-| Parameter | Type | Description |
-|-----------|------------|-------------|
-| organization | String | The organization in context |
-| userType | String | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.) |
-| userLanguage | String | Preferred language of the person using the system |
-| userTaskContext | String | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources |
-| setting | String | The current setting of the request (inpatient, outpatient, etc.) |
-| settingContext | String | Additional detail about the setting of the request, if any |
-| parameters | Parameters | Any input parameters defined in libraries referenced by the PlanDefinition. |
-| data | Bundle | Data to be made available to the PlanDefinition evaluation. |
-| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the PlanDefinition. |
-| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the PlanDefinition. |
-| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the PlanDefinition. |
-
-
-## Package
-
-The `package` operation for [PlanDefinition](https://www.hl7.org/fhir/plandefinition.html) will generate a Bundle of resources that includes the PlanDefinition as well as any related resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html).
@@ -10,17 +10,96 @@ In addition to its use as a means for capturing data, Questionnaires can also be

 ## Operations

-HAPI implements the following operations from the [Structured Data Capture IG](https://hl7.org/fhir/uv/sdc/index.html)
-* [$populate](https://hl7.org/fhir/uv/sdc/OperationDefinition-Questionnaire-populate.html)
-* [$extract](http://hl7.org/fhir/uv/sdc/OperationDefinition-QuestionnaireResponse-extract.html)
-
-Support for additional operations is planned.
+HAPI implements the following operations for Questionnaires and QuestionnaireResponses:
+
+* [$questionnaire](/docs/clinical_reasoning/questionnaires.html#questionnaire)
+* [$populate](/docs/clinical_reasoning/questionnaires.html#populate)
+* [$extract](/docs/clinical_reasoning/questionnaires.html#extract)
+* [$package](/docs/clinical_reasoning/questionnaires.html#package)

## Questionnaire

The `StructureDefinition/$questionnaire` [operation]() generates a [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) from a given [StructureDefinition](https://www.hl7.org/fhir/structuredefinition.html). A question will be created for each core element or extension element found in the StructureDefinition.

### Parameters

The following parameters are supported for the `StructureDefinition/$questionnaire` operation:

| Parameter | Type | Description |
|-----------|------|-------------|
| profile | StructureDefinition | The StructureDefinition to base the Questionnaire on. Used when the operation is invoked at the 'type' level. |
| canonical | canonical | The canonical identifier for the StructureDefinition (optionally version-specific). |
| url | uri | Canonical URL of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. |
| version | string | Version of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. |
| supportedOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "mustSupport='true'" in the StructureDefinition. |
| requiredOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "min>0" in the StructureDefinition. |
| subject | string | The subject(s) that is/are the target of the Questionnaire. |
| parameters | Parameters | Any input parameters defined in libraries referenced by the StructureDefinition. |
| useServerData | boolean | Whether to use data from the server performing the evaluation. |
| data | Bundle | Data to be made available during CQL evaluation. |
| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the StructureDefinition. |
| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the StructureDefinition. |
| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the StructureDefinition. |

## Populate

-The `populate` operation generates a [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) based on a specific [Questionnaire](https://www.hl7.org/fhir/questionnaire.html), filling in answers to questions where possible based on information provided as part of the operation or already known by the server about the subject of the Questionnaire.
+The `Questionnaire/$populate` [operation](https://hl7.org/fhir/uv/sdc/OperationDefinition-Questionnaire-populate.html) generates a [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) based on a specific [Questionnaire](https://www.hl7.org/fhir/questionnaire.html), filling in answers to questions where possible based on information provided as part of the operation or already known by the server about the subject of the Questionnaire.

-### Example Questionnaire
+### Parameters

The following parameters are supported for the `Questionnaire/$populate` operation:

| Parameter | Type | Description |
|-----------|------|-------------|
| questionnaire | Questionnaire | The Questionnaire to populate. Used when the operation is invoked at the 'type' level. |
| canonical | canonical | The canonical identifier for the Questionnaire (optionally version-specific). |
| url | uri | Canonical URL of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. |
| version | string | Version of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. |
| subject | string | The subject(s) that is/are the target of the Questionnaire. |
| parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. |
| useServerData | boolean | Whether to use data from the server performing the evaluation. |
| data | Bundle | Data to be made available during CQL evaluation. |
| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the Questionnaire. |
| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the Questionnaire. |
| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the Questionnaire. |
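
For illustration, here is a hedged sketch of invoking `$populate` with the HAPI FHIR generic client; the base URL, Questionnaire id, and subject are placeholders:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class QuestionnairePopulateExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://your-server-base/fhir");

		// "subject" is the only input supplied here; see the parameter table above.
		Parameters inParams = new Parameters();
		inParams.addParameter().setName("subject").setValue(new StringType("Patient/123"));

		// Instance-level invocation against a placeholder Questionnaire id.
		// The response carries the generated QuestionnaireResponse.
		Parameters outParams = client.operation()
				.onInstance(new IdType("Questionnaire", "example"))
				.named("$populate")
				.withParameters(inParams)
				.execute();

		System.out.println(ctx.newJsonParser().encodeResourceToString(outParams));
	}
}
```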

## Extract

The `QuestionnaireResponse/$extract` [operation](http://hl7.org/fhir/uv/sdc/OperationDefinition-QuestionnaireResponse-extract.html) takes a completed [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) and converts it to a Bundle of resources by using metadata embedded in the [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) the QuestionnaireResponse is based on. The extracted resources might include Observations, MedicationStatements, and other standard FHIR resources, which can then be shared and manipulated. When invoking the $extract operation, care should be taken that the submitted QuestionnaireResponse is itself valid. If not, the extract operation could fail (with appropriate OperationOutcomes) or, more problematically, might succeed but produce incorrect output.

This implementation allows for both [Observation based](https://hl7.org/fhir/uv/sdc/extraction.html#observation-based-extraction) and [Definition based](https://hl7.org/fhir/uv/sdc/extraction.html#definition-based-extraction) extraction.

### Parameters

The following parameters are supported for the `QuestionnaireResponse/$extract` operation:

| Parameter | Type | Description |
|-----------|------|-------------|
| questionnaire-response | QuestionnaireResponse | The QuestionnaireResponse to extract data from. Used when the operation is invoked at the 'type' level. |
| parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. |
| data | Bundle | Data to be made available during CQL evaluation. |

## Package

The `Questionnaire/$package` [operation](https://build.fhir.org/ig/HL7/crmi-ig/OperationDefinition-crmi-package.html) for [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) generates a Bundle of resources that includes the Questionnaire as well as any related Library or ValueSet resources, which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html).

### Parameters

The following parameters are supported for the `Questionnaire/$package` operation:

| Parameter | Type | Description |
|-----------|-----------|-------------|
| id | string | The logical id of an existing Resource to package on the server. |
| canonical | canonical | A canonical url (optionally version specific) of a Resource to package on the server. |
| url | uri | A canonical or artifact reference to a Resource to package on the server. This is exclusive with the canonical parameter. |
| version | string | The version of the Resource. This is exclusive with the canonical parameter. |
| usePut | boolean | Determines the HTTP method used in the Bundle entries: POST if false (the default), PUT if true. |

## Example Questionnaire

```json
{
@@ -219,7 +298,7 @@ The `populate` operation generates a [QuestionnaireResponse](https://www.hl7.org
 }
 ```

-### Example QuestionnaireResponse
+## Example QuestionnaireResponse

 ```json
 {
@@ -486,14 +565,3 @@ The `populate` operation generates a [QuestionnaireResponse](https://www.hl7.org
 ]
 }
 ```
-
-## Extract
-
-The `extract` operation takes a completed [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) and converts it to a Bundle of resources by using metadata embedded in the [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) the QuestionnaireResponse is based on. The extracted resources might include Observations, MedicationStatements and other standard FHIR resources which can then be shared and manipulated. When invoking the $extract operation, care should be taken that the submitted QuestionnaireResponse is itself valid. If not, the extract operation could fail (with appropriate OperationOutcomes) or, more problematic, might succeed but provide incorrect output.
-
-This implementation allows for both [Observation based](https://hl7.org/fhir/uv/sdc/extraction.html#observation-based-extraction) and [Definition based](https://hl7.org/fhir/uv/sdc/extraction.html#definition-based-extraction) extraction.
-
-
-## Package
-
-The `package` operation for [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) will generate a Bundle of resources that includes the Questionnaire as well as any related Library or ValueSet resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html).
File diff suppressed because one or more lines are too long

Image added (24 KiB)
@@ -11,7 +11,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.6-SNAPSHOT</version>
+    <version>7.1.7-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.logging;

 import com.google.common.annotations.VisibleForTesting;

@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.logging;

 import org.hibernate.boot.registry.StandardServiceRegistryBuilder;

@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.logging;

 /**

@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.logging;

 import ca.uhn.fhir.i18n.Msg;

@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.logging;

 /**

@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.logging;

 import ca.uhn.fhir.i18n.Msg;

@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.logging;

 /**

@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.logging;

 import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -74,7 +74,7 @@ public class HapiFhirHibernateJpaDialect extends HibernateJpaDialect {
}

if (HapiSystemProperties.isUnitTestModeEnabled()) {
ourLog.error("Hibernate exception", theException);
ourLog.error("Unit test mode: Hibernate exception", theException);
}

if (theException instanceof ConstraintViolationException) {

@ -167,6 +167,14 @@ public class HistoryBuilder {
Optional<String> forcedId = pidToForcedId.get(JpaPid.fromId(nextResourceId));
if (forcedId.isPresent()) {
resourceId = forcedId.get();
// IdHelperService returns a forcedId with the '<resourceType>/' prefix,
// but the transientForcedId is expected to be just the idPart (without the '<resourceType>/' prefix).
// For that reason, strip the prefix before setting the transientForcedId below.
// If it is not stripped, the constructed resource ends up with the resource type
// repeated in its ID, e.g. Patient/Patient/1234.
if (resourceId.startsWith(myResourceType + "/")) {
resourceId = resourceId.substring(myResourceType.length() + 1);
}
} else {
resourceId = nextResourceId.toString();
}

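A minimal standalone sketch of the stripping rule the hunk above introduces; the class and helper below are invented for illustration and are not part of HAPI FHIR's API:

public class ForcedIdPrefixStripExample {

    // Hypothetical helper mirroring the hunk above: reduce "Patient/1234" to "1234"
    // so that later ID construction does not repeat the resource type.
    static String stripResourceTypePrefix(String theResourceType, String theForcedId) {
        if (theForcedId.startsWith(theResourceType + "/")) {
            return theForcedId.substring(theResourceType.length() + 1);
        }
        return theForcedId;
    }

    public static void main(String[] args) {
        // Without the strip, rebuilding the full ID would yield "Patient/Patient/1234"
        System.out.println(stripResourceTypePrefix("Patient", "Patient/1234")); // 1234
        System.out.println(stripResourceTypePrefix("Patient", "1234"));         // 1234 (already bare)
    }
}
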
@ -55,14 +55,15 @@ public interface IBatch2JobInstanceRepository
int updateWorkChunksPurgedTrue(@Param("id") String theInstanceId);

@Query(
"SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myParamsJson = :params AND b.myStatus IN( :stats )")
"SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND (b.myParamsJson = :params OR b.myParamsJsonVc = :params) AND b.myStatus IN( :stats )")
List<Batch2JobInstanceEntity> findInstancesByJobIdParamsAndStatus(
@Param("defId") String theDefinitionId,
@Param("params") String theParams,
@Param("stats") Set<StatusEnum> theStatus,
Pageable thePageable);

@Query("SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myParamsJson = :params")
@Query(
"SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND (b.myParamsJson = :params OR b.myParamsJsonVc = :params)")
List<Batch2JobInstanceEntity> findInstancesByJobIdAndParams(
@Param("defId") String theDefinitionId, @Param("params") String theParams, Pageable thePageable);

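The OR over both columns keeps these lookups correct while instances written before the migration still carry their params in the legacy myParamsJson column. A sketch of the same dual-column query pattern on an invented entity (none of these names are HAPI FHIR's):

import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.List;

@Entity
class ExampleEntity {
    @Id
    private String myId;

    // Legacy column: no longer written, but existing rows may still populate it
    @Column(name = "PARAMS_JSON")
    private String myLegacyJson;

    // Replacement column: all new writes land here
    @Column(name = "PARAMS_JSON_VC")
    private String myJsonVc;
}

// While rows are mid-migration a value can live in either column,
// so the lookup must accept a match in either one.
interface ExampleDualColumnRepository extends JpaRepository<ExampleEntity, String> {

    @Query("SELECT e FROM ExampleEntity e WHERE e.myLegacyJson = :json OR e.myJsonVc = :json")
    List<ExampleEntity> findByJson(@Param("json") String theJson);
}
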
@ -65,7 +65,7 @@ public interface IBatch2WorkChunkRepository

@Modifying
@Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, "
+ "e.myRecordsProcessed = :rp, e.myErrorCount = e.myErrorCount + :errorRetries, e.mySerializedData = null, "
+ "e.myRecordsProcessed = :rp, e.myErrorCount = e.myErrorCount + :errorRetries, e.mySerializedData = null, e.mySerializedDataVc = null, "
+ "e.myWarningMessage = :warningMessage WHERE e.myId = :id")
void updateChunkStatusAndClearDataForEndSuccess(
@Param("id") String theChunkId,

@ -77,7 +77,7 @@ public interface IBatch2WorkChunkRepository

@Modifying
@Query(
"UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.mySerializedData = null, e.myErrorMessage = :em WHERE e.myId IN(:ids)")
"UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.mySerializedData = null, e.mySerializedDataVc = null, e.myErrorMessage = :em WHERE e.myId IN(:ids)")
void updateAllChunksForInstanceStatusClearDataAndSetError(
@Param("ids") List<String> theChunkIds,
@Param("et") Date theEndTime,

@ -36,6 +36,7 @@ import jakarta.persistence.TemporalType;
import jakarta.persistence.Version;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.Length;

import java.io.Serializable;
import java.util.Date;

@ -95,13 +96,17 @@ public class Batch2JobInstanceEntity implements Serializable {
@Column(name = "FAST_TRACKING", nullable = true)
private Boolean myFastTracking;

// TODO: VC column added in 7.2.0 - Remove non-VC column later
@Column(name = "PARAMS_JSON", length = PARAMS_JSON_MAX_LENGTH, nullable = true)
private String myParamsJson;

@Lob
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Column(name = "PARAMS_JSON_LOB", nullable = true)
private String myParamsJsonLob;

@Column(name = "PARAMS_JSON_VC", nullable = true, length = Length.LONG32)
private String myParamsJsonVc;

@Column(name = "CMB_RECS_PROCESSED", nullable = true)
private Integer myCombinedRecordsProcessed;

@ -142,11 +147,14 @@ public class Batch2JobInstanceEntity implements Serializable {
 * Any output from the job can be held in this column
 * Even serialized json
 */
@Lob
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Basic(fetch = FetchType.LAZY)
@Column(name = "REPORT", nullable = true, length = Integer.MAX_VALUE - 1)
private String myReport;

@Column(name = "REPORT_VC", nullable = true, length = Length.LONG32)
private String myReportVc;

public String getCurrentGatedStepId() {
return myCurrentGatedStepId;
}

@ -260,6 +268,9 @@ public class Batch2JobInstanceEntity implements Serializable {
}

public String getParams() {
if (myParamsJsonVc != null) {
return myParamsJsonVc;
}
if (myParamsJsonLob != null) {
return myParamsJsonLob;
}

@ -267,13 +278,9 @@ public class Batch2JobInstanceEntity implements Serializable {
}

public void setParams(String theParams) {
myParamsJsonVc = theParams;
myParamsJsonLob = null;
myParamsJson = null;
if (theParams != null && theParams.length() > PARAMS_JSON_MAX_LENGTH) {
myParamsJsonLob = theParams;
} else {
myParamsJson = theParams;
}
}

public boolean getWorkChunksPurged() {

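getParams()/setParams() above are one instance of the lazy-migration idiom used throughout this change set: new writes target only the VARCHAR column and null out the legacy columns, while reads prefer the new column and fall back for rows not yet rewritten. A condensed sketch on an invented entity (names are illustrative):

import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Lob;

// Illustrative entity (not HAPI FHIR's) showing the dual-column idiom: a row is
// migrated implicitly the next time the setter runs and the entity is saved.
@Entity
public class DualColumnEntity {

    @Id
    private String myId;

    @Lob // legacy LOB column - only read, never written going forward
    @Column(name = "DATA_LOB", nullable = true)
    private String myDataLob;

    // replacement column - the only one written going forward
    @Column(name = "DATA_VC", nullable = true)
    private String myDataVc;

    public String getData() {
        // Prefer the new column; fall back for rows written before the migration
        return myDataVc != null ? myDataVc : myDataLob;
    }

    public void setData(String theData) {
        myDataVc = theData;
        myDataLob = null;
    }
}
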
@ -309,11 +316,12 @@ public class Batch2JobInstanceEntity implements Serializable {
}

public String getReport() {
return myReport;
return myReportVc != null ? myReportVc : myReport;
}

public void setReport(String theReport) {
myReport = theReport;
myReportVc = theReport;
myReport = null;
}

public String getWarningMessages() {

@ -362,7 +370,7 @@ public class Batch2JobInstanceEntity implements Serializable {
.append("progress", myProgress)
.append("errorMessage", myErrorMessage)
.append("estimatedTimeRemaining", myEstimatedTimeRemaining)
.append("report", myReport)
.append("report", getReport())
.append("warningMessages", myWarningMessages)
.append("initiatingUsername", myTriggeringUsername)
.append("initiatingclientId", myTriggeringClientId)

@ -39,6 +39,7 @@ import jakarta.persistence.TemporalType;
import jakarta.persistence.Version;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.Length;

import java.io.Serializable;
import java.util.Date;

@ -97,11 +98,14 @@ public class Batch2WorkChunkEntity implements Serializable {
@Column(name = "TGT_STEP_ID", length = ID_MAX_LENGTH, nullable = false)
private String myTargetStepId;

@Lob
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Basic(fetch = FetchType.LAZY)
@Column(name = "CHUNK_DATA", nullable = true, length = Integer.MAX_VALUE - 1)
private String mySerializedData;

@Column(name = "CHUNK_DATA_VC", nullable = true, length = Length.LONG32)
private String mySerializedDataVc;

@Column(name = "STAT", length = STATUS_MAX_LENGTH, nullable = false)
@Enumerated(EnumType.STRING)
private WorkChunkStatusEnum myStatus;

@ -290,11 +294,12 @@ public class Batch2WorkChunkEntity implements Serializable {
}

public String getSerializedData() {
return mySerializedData;
return mySerializedDataVc != null ? mySerializedDataVc : mySerializedData;
}

public void setSerializedData(String theSerializedData) {
mySerializedData = theSerializedData;
mySerializedData = null;
mySerializedDataVc = theSerializedData;
}

public WorkChunkStatusEnum getStatus() {

@ -336,7 +341,7 @@ public class Batch2WorkChunkEntity implements Serializable {
.append("updateTime", myUpdateTime)
.append("recordsProcessed", myRecordsProcessed)
.append("targetStepId", myTargetStepId)
.append("serializedData", mySerializedData)
.append("serializedData", getSerializedData())
.append("status", myStatus)
.append("errorMessage", myErrorMessage)
.append("warningMessage", myWarningMessage)

@ -32,6 +32,7 @@ import jakarta.persistence.Lob;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import org.hibernate.Length;

import java.io.Serializable;
import java.nio.charset.StandardCharsets;

@ -66,10 +67,13 @@ public class BulkImportJobFileEntity implements Serializable {
@Column(name = "FILE_DESCRIPTION", nullable = true, length = MAX_DESCRIPTION_LENGTH)
private String myFileDescription;

@Lob
@Column(name = "JOB_CONTENTS", nullable = false)
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Column(name = "JOB_CONTENTS", nullable = true)
private byte[] myContents;

@Column(name = "JOB_CONTENTS_VC", nullable = true, length = Length.LONG32)
private String myContentsVc;

@Column(name = "TENANT_NAME", nullable = true, length = PartitionEntity.MAX_NAME_LENGTH)
private String myTenantName;

@ -98,11 +102,16 @@ public class BulkImportJobFileEntity implements Serializable {
}

public String getContents() {
return new String(myContents, StandardCharsets.UTF_8);
if (myContentsVc != null) {
return myContentsVc;
} else {
return new String(myContents, StandardCharsets.UTF_8);
}
}

public void setContents(String theContents) {
myContents = theContents.getBytes(StandardCharsets.UTF_8);
myContentsVc = theContents;
myContents = null;
}

public BulkImportJobFileJson toJson() {

@ -50,6 +50,7 @@ import jakarta.persistence.UniqueConstraint;
import jakarta.persistence.Version;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hibernate.Length;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.annotations.OptimisticLock;
import org.hibernate.type.SqlTypes;

@ -141,14 +142,21 @@ public class Search implements ICachedSearchDetails, Serializable {

@Column(name = "RESOURCE_TYPE", length = 200, nullable = true)
private String myResourceType;

/**
 * Note that this field may have the request partition IDs prepended to it
 */
@Lob()
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Basic(fetch = FetchType.LAZY)
@Column(name = "SEARCH_QUERY_STRING", nullable = true, updatable = false, length = MAX_SEARCH_QUERY_STRING)
private String mySearchQueryString;

/**
 * Note that this field may have the request partition IDs prepended to it
 */
@Column(name = "SEARCH_QUERY_STRING_VC", nullable = true, length = Length.LONG32)
private String mySearchQueryStringVc;

@Column(name = "SEARCH_QUERY_STRING_HASH", nullable = true, updatable = false)
private Integer mySearchQueryStringHash;

@ -172,10 +180,13 @@ public class Search implements ICachedSearchDetails, Serializable {
@Column(name = "OPTLOCK_VERSION", nullable = true)
private Integer myVersion;

@Lob
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Column(name = "SEARCH_PARAM_MAP", nullable = true)
private byte[] mySearchParameterMap;

@Column(name = "SEARCH_PARAM_MAP_BIN", nullable = true, length = Length.LONG32)
private byte[] mySearchParameterMapBin;

@Transient
private transient SearchParameterMap mySearchParameterMapTransient;

@ -350,7 +361,7 @@ public class Search implements ICachedSearchDetails, Serializable {
 * Note that this field may have the request partition IDs prepended to it
 */
public String getSearchQueryString() {
return mySearchQueryString;
return mySearchQueryStringVc != null ? mySearchQueryStringVc : mySearchQueryString;
}

public void setSearchQueryString(String theSearchQueryString, RequestPartitionId theRequestPartitionId) {

@ -362,12 +373,13 @@ public class Search implements ICachedSearchDetails, Serializable {
// We want this field to always have a wide distribution of values so that
// query optimizers don't skip it when it contains many nulls. In the null
// case, just store a value that will never match a real query string.
mySearchQueryString = UUID.randomUUID().toString();
mySearchQueryStringVc = UUID.randomUUID().toString();
} else {
mySearchQueryString = searchQueryString;
mySearchQueryStringVc = searchQueryString;
}

mySearchQueryStringHash = mySearchQueryString.hashCode();
mySearchQueryString = null;
mySearchQueryStringHash = mySearchQueryStringVc.hashCode();
}

public SearchTypeEnum getSearchType() {

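The hash column exists so cache lookups can probe an indexed integer before comparing long strings, and the random UUID keeps the hash distribution wide when the query string is null. A self-contained sketch of that idiom (class and field names invented, not HAPI FHIR's):

import java.util.UUID;

public class QueryStringHashExample {

    private String myQueryStringVc;
    private Integer myQueryStringHash;

    public void setQueryString(String theQueryString) {
        if (theQueryString == null || theQueryString.isEmpty()) {
            // A value that no real query string will ever equal
            myQueryStringVc = UUID.randomUUID().toString();
        } else {
            myQueryStringVc = theQueryString;
        }
        myQueryStringHash = myQueryStringVc.hashCode();
    }

    public boolean probablyMatches(String theCandidate) {
        // Cheap indexed comparison first; a full string compare would confirm the match
        return theCandidate != null && myQueryStringHash != null
                && myQueryStringHash == theCandidate.hashCode();
    }

    public static void main(String[] args) {
        QueryStringHashExample e = new QueryStringHashExample();
        e.setQueryString("Patient?name=Smith");
        System.out.println(e.probablyMatches("Patient?name=Smith")); // true
        System.out.println(e.probablyMatches("Patient?name=Jones")); // false (almost certainly)
    }
}
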
@ -466,8 +478,12 @@ public class Search implements ICachedSearchDetails, Serializable {
return Optional.of(mySearchParameterMapTransient);
}
SearchParameterMap searchParameterMap = null;
if (mySearchParameterMap != null) {
searchParameterMap = SerializationUtils.deserialize(mySearchParameterMap);
byte[] searchParameterMapSerialized = mySearchParameterMapBin;
if (searchParameterMapSerialized == null) {
searchParameterMapSerialized = mySearchParameterMap;
}
if (searchParameterMapSerialized != null) {
searchParameterMap = SerializationUtils.deserialize(searchParameterMapSerialized);
mySearchParameterMapTransient = searchParameterMap;
}
return Optional.ofNullable(searchParameterMap);

@ -475,7 +491,8 @@ public class Search implements ICachedSearchDetails, Serializable {

public void setSearchParameterMap(SearchParameterMap theSearchParameterMap) {
mySearchParameterMapTransient = theSearchParameterMap;
mySearchParameterMap = SerializationUtils.serialize(theSearchParameterMap);
mySearchParameterMapBin = SerializationUtils.serialize(theSearchParameterMap);
mySearchParameterMap = null;
}

@Override

@ -117,6 +117,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
init640_after_20230126();
init660();
init680();
init680_Part2();
init700();
}

@ -226,6 +227,44 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.withColumns("INSTANCE_ID", "TGT_STEP_ID", "STAT", "SEQ", "ID");
}

private void init680_Part2() {
Builder version = forVersion(VersionEnum.V6_8_0);

// Add additional LOB migration columns
version.onTable("BT2_JOB_INSTANCE")
.addColumn("20240227.1", "REPORT_VC")
.nullable()
.type(ColumnTypeEnum.TEXT);
version.onTable("BT2_JOB_INSTANCE")
.addColumn("20240227.2", "PARAMS_JSON_VC")
.nullable()
.type(ColumnTypeEnum.TEXT);

version.onTable("BT2_WORK_CHUNK")
.addColumn("20240227.3", "CHUNK_DATA_VC")
.nullable()
.type(ColumnTypeEnum.TEXT);

version.onTable("HFJ_SEARCH")
.addColumn("20240227.4", "SEARCH_QUERY_STRING_VC")
.nullable()
.type(ColumnTypeEnum.TEXT);
version.onTable("HFJ_SEARCH")
.addColumn("20240227.5", "SEARCH_PARAM_MAP_BIN")
.nullable()
.type(ColumnTypeEnum.BINARY);

version.onTable("HFJ_BLK_IMPORT_JOBFILE")
.addColumn("20240227.6", "JOB_CONTENTS_VC")
.nullable()
.type(ColumnTypeEnum.TEXT);

version.onTable("HFJ_BLK_IMPORT_JOBFILE")
.modifyColumn("20240227.7", "JOB_CONTENTS")
.nullable()
.withType(ColumnTypeEnum.BLOB);
}

protected void init680() {
Builder version = forVersion(VersionEnum.V6_8_0);
// HAPI-FHIR #4801 - Add New Index On HFJ_RESOURCE

@ -843,6 +843,11 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

RuntimeSearchParam param = null;

if (param == null) {
// do we have a composition param defined for the whole chain?
param = mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName());
}

/*
 * If we have a sort like _sort=subject.name and we have an
 * uplifted refchain for that combination we can do it more efficiently

@ -851,7 +856,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 * to "name" in this example) so that we know what datatype it is.
 */
String paramName = theSort.getParamName();
if (myStorageSettings.isIndexOnUpliftedRefchains()) {
if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) {
String[] chains = StringUtils.split(paramName, '.');
if (chains.length == 2) {

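Taken together, the two hunks change the resolution order: the sort parameter name is first resolved as a whole against the registry, and only if that fails is it treated as an uplifted refchain such as subject.name. A toy illustration of that ordering (invented names, not HAPI FHIR's API):

import java.util.Map;

// Toy illustration of the corrected resolution order: a whole-name search
// parameter wins; only when none matches is the name split as a two-part chain.
public class SortParamResolutionExample {

    static String resolve(Map<String, String> theActiveParams, String theSortParamName) {
        String param = theActiveParams.get(theSortParamName); // whole-name match first
        if (param == null && theSortParamName.contains(".")) {
            String[] chain = theSortParamName.split("\\.", 2); // uplifted refchain path
            return "chain:" + chain[0] + "->" + chain[1];
        }
        return param != null ? "direct:" + param : "unknown";
    }

    public static void main(String[] args) {
        Map<String, String> params = Map.of("subject", "reference");
        System.out.println(resolve(params, "subject.name")); // chain:subject->name
        System.out.println(resolve(params, "subject"));      // direct:reference
    }
}
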
@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -205,6 +205,9 @@ public class SubscriptionWebsocketHandler extends TextWebSocketHandler implement
 * @return The payload
 */
private Optional<String> getPayloadByContent(ResourceDeliveryMessage msg) {
if (msg.getSubscription().getContent() == null) {
return Optional.empty();
}
switch (msg.getSubscription().getContent()) {
case IDONLY:
return Optional.of(msg.getPayloadId());

@ -25,8 +25,10 @@ import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionStrategyEvaluator;
import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionQueryValidator;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;

@Configuration
public class SubscriptionTopicConfig {
@Bean
SubscriptionTopicMatchingSubscriber subscriptionTopicMatchingSubscriber(FhirContext theFhirContext) {

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.6-SNAPSHOT</version>
<version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.bulk;

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.batch2.model.StatusEnum;

@ -10,6 +11,9 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.batch2.JpaJobPersistenceImpl;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.rest.api.Constants;

@ -21,11 +25,13 @@ import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.test.utilities.HttpClientExtension;
import ca.uhn.fhir.test.utilities.ProxyUtil;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.JsonUtil;
import com.google.common.collect.Sets;
import jakarta.annotation.Nonnull;
import org.apache.commons.io.LineIterator;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;

@ -66,6 +72,7 @@ import org.mockito.Spy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;

import java.io.IOException;
import java.io.StringReader;

@ -80,6 +87,8 @@ import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;

import static ca.uhn.fhir.batch2.jobs.export.BulkExportAppCtx.CREATE_REPORT_STEP;
import static ca.uhn.fhir.batch2.jobs.export.BulkExportAppCtx.WRITE_TO_BINARIES;
import static ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TagsInlineTest.createSearchParameterForInlineSecurity;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.awaitility.Awaitility.await;

@ -100,17 +109,25 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {

@Autowired
private IJobCoordinator myJobCoordinator;
@Autowired
private IBatch2WorkChunkRepository myWorkChunkRepository;
@Autowired
private IJobPersistence myJobPersistence;
private JpaJobPersistenceImpl myJobPersistenceImpl;

@AfterEach
void afterEach() {
myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.DISABLED);
myStorageSettings.setTagStorageMode(new JpaStorageSettings().getTagStorageMode());
myStorageSettings.setResourceClientIdStrategy(new JpaStorageSettings().getResourceClientIdStrategy());
JpaStorageSettings defaults = new JpaStorageSettings();
myStorageSettings.setTagStorageMode(defaults.getTagStorageMode());
myStorageSettings.setResourceClientIdStrategy(defaults.getResourceClientIdStrategy());
myStorageSettings.setBulkExportFileMaximumSize(defaults.getBulkExportFileMaximumSize());
}

@BeforeEach
public void beforeEach() {
myStorageSettings.setJobFastTrackingEnabled(false);
myJobPersistenceImpl = ProxyUtil.getSingletonTarget(myJobPersistence, JpaJobPersistenceImpl.class);
}

@Spy

@ -0,0 +1,177 @@
package ca.uhn.fhir.jpa.bulk.export;

import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.ExpandResourceAndWriteBinaryStep;
import ca.uhn.fhir.batch2.jobs.export.ExpandResourcesStep;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.jobs.models.BatchResourceId;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import jakarta.persistence.Id;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.r4.model.Binary;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.springframework.beans.factory.annotation.Autowired;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

public class ExpandResourcesAndWriteBinaryStepJpaTest extends BaseJpaR4Test {

@Autowired
private ExpandResourceAndWriteBinaryStep myExpandResourcesStep;

@Mock
private IJobDataSink<BulkExportBinaryFileId> mySink;
@Captor
private ArgumentCaptor<BulkExportBinaryFileId> myWorkChunkCaptor;

@Override
public void afterCleanupDao() {
super.afterCleanupDao();

JpaStorageSettings defaults = new JpaStorageSettings();
myStorageSettings.setBulkExportFileMaximumSize(defaults.getBulkExportFileMaximumSize());
}

@Test
public void testMaximumChunkSize() {
/*
 * We're going to set the maximum file size to 3000, and create some resources with
 * a name that is 1000 chars long. Together with the other boilerplate text in a
 * resource, that puts each resource's length at just over 1000 chars, meaning that
 * any given chunk or file should hold only 2 resources.
 */
int testResourceSize = 1000;
int maxFileSize = 3 * testResourceSize;
myStorageSettings.setBulkExportFileMaximumSize(maxFileSize);

List<BatchResourceId> expectedIds = new ArrayList<>();
for (int i = 0; i < 10; i++) {
Patient p = new Patient();
p.addName().setFamily(StringUtils.leftPad("", testResourceSize, 'A'));
String id = myPatientDao.create(p, mySrd).getId().getIdPart();
expectedIds.add(new BatchResourceId().setResourceType("Patient").setId(id));
}
Collections.sort(expectedIds);

ResourceIdList resourceList = new ResourceIdList();
resourceList.setResourceType("Patient");
resourceList.setIds(expectedIds);

BulkExportJobParameters params = new BulkExportJobParameters();
JobInstance jobInstance = new JobInstance();
String chunkId = "ABC";

StepExecutionDetails<BulkExportJobParameters, ResourceIdList> details = new StepExecutionDetails<>(params, resourceList, jobInstance, chunkId);

// Test

myExpandResourcesStep.run(details, mySink);

// Verify
verify(mySink, atLeast(1)).accept(myWorkChunkCaptor.capture());
List<BatchResourceId> actualResourceIdList = new ArrayList<>();
for (BulkExportBinaryFileId next : myWorkChunkCaptor.getAllValues()) {

Binary nextBinary = myBinaryDao.read(new IdType(next.getBinaryId()), mySrd);
String nextNdJsonString = new String(nextBinary.getContent(), StandardCharsets.UTF_8);

// This is the most important check here
assertThat(nextNdJsonString.length(), lessThanOrEqualTo(maxFileSize));

Arrays.stream(nextNdJsonString.split("\\n"))
.filter(StringUtils::isNotBlank)
.map(t->myFhirContext.newJsonParser().parseResource(t))
.map(t->new BatchResourceId().setResourceType(t.getIdElement().getResourceType()).setId(t.getIdElement().getIdPart()))
.forEach(actualResourceIdList::add);

}
Collections.sort(actualResourceIdList);
assertEquals(expectedIds, actualResourceIdList);
}

@Test
public void testMaximumChunkSize_SingleFileExceedsMaximum() {
/*
 * We're going to set the maximum file size to 1000, and create some resources
 * with a name that is 1500 chars long. In this case, each resource exceeds the
 * configured maximum on its own, so there should be one output file per resource.
 */
int testResourceSize = 1500;
int maxFileSize = 1000;
myStorageSettings.setBulkExportFileMaximumSize(maxFileSize);

List<BatchResourceId> expectedIds = new ArrayList<>();
int numberOfResources = 10;
for (int i = 0; i < numberOfResources; i++) {
Patient p = new Patient();
p.addName().setFamily(StringUtils.leftPad("", testResourceSize, 'A'));
String id = myPatientDao.create(p, mySrd).getId().getIdPart();
expectedIds.add(new BatchResourceId().setResourceType("Patient").setId(id));
}
Collections.sort(expectedIds);

ResourceIdList resourceList = new ResourceIdList();
resourceList.setResourceType("Patient");
resourceList.setIds(expectedIds);

BulkExportJobParameters params = new BulkExportJobParameters();
JobInstance jobInstance = new JobInstance();
String chunkId = "ABC";

StepExecutionDetails<BulkExportJobParameters, ResourceIdList> details = new StepExecutionDetails<>(params, resourceList, jobInstance, chunkId);

// Test

myExpandResourcesStep.run(details, mySink);

// Verify

// This is the most important check - we should have one file per resource
verify(mySink, times(numberOfResources)).accept(myWorkChunkCaptor.capture());

List<BatchResourceId> actualResourceIdList = new ArrayList<>();
for (BulkExportBinaryFileId next : myWorkChunkCaptor.getAllValues()) {

Binary nextBinary = myBinaryDao.read(new IdType(next.getBinaryId()), mySrd);
String nextNdJsonString = new String(nextBinary.getContent(), StandardCharsets.UTF_8);

Arrays.stream(nextNdJsonString.split("\\n"))
.filter(StringUtils::isNotBlank)
.map(t->myFhirContext.newJsonParser().parseResource(t))
.map(t->new BatchResourceId().setResourceType(t.getIdElement().getResourceType()).setId(t.getIdElement().getIdPart()))
.forEach(actualResourceIdList::add);

}
Collections.sort(actualResourceIdList);
assertEquals(expectedIds, actualResourceIdList);
}

}

@ -3,13 +3,14 @@ package ca.uhn.fhir.jpa.bulk.export;
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.ExpandResourcesStep;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.jobs.models.BatchResourceId;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;

@ -20,13 +21,17 @@ import org.mockito.Mock;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.IntStream;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

@ -44,7 +49,9 @@ public class ExpandResourcesStepJpaTest extends BaseJpaR4Test {
public void afterCleanupDao() {
super.afterCleanupDao();

myStorageSettings.setTagStorageMode(new JpaStorageSettings().getTagStorageMode());
JpaStorageSettings defaults = new JpaStorageSettings();
myStorageSettings.setTagStorageMode(defaults.getTagStorageMode());
myStorageSettings.setBulkExportFileMaximumSize(defaults.getBulkExportFileMaximumSize());
}

/**

@ -194,4 +201,60 @@ public class ExpandResourcesStepJpaTest extends BaseJpaR4Test {

}

@Test
public void testMaximumChunkSize() {
/*
 * We're going to set the maximum file size to 3000, and create some resources with
 * a name that is 1000 chars long. Together with the other boilerplate text in a
 * resource, that puts each resource's length at just over 1000 chars, meaning that
 * any given chunk or file should hold only 2 resources.
 */
int testResourceSize = 1000;
int maxFileSize = 3 * testResourceSize;
myStorageSettings.setBulkExportFileMaximumSize(maxFileSize);

List<BatchResourceId> expectedIds = new ArrayList<>();
for (int i = 0; i < 10; i++) {
Patient p = new Patient();
p.addName().setFamily(StringUtils.leftPad("", testResourceSize, 'A'));
String id = myPatientDao.create(p, mySrd).getId().getIdPart();
expectedIds.add(new BatchResourceId().setResourceType("Patient").setId(id));
}
Collections.sort(expectedIds);

ResourceIdList resourceList = new ResourceIdList();
resourceList.setResourceType("Patient");
resourceList.setIds(expectedIds);

BulkExportJobParameters params = new BulkExportJobParameters();
JobInstance jobInstance = new JobInstance();
String chunkId = "ABC";

StepExecutionDetails<BulkExportJobParameters, ResourceIdList> details = new StepExecutionDetails<>(params, resourceList, jobInstance, chunkId);

// Test

myCaptureQueriesListener.clear();
myExpandResourcesStep.run(details, mySink);

// Verify
verify(mySink, atLeast(1)).accept(myWorkChunkCaptor.capture());
List<BatchResourceId> actualResourceIdList = new ArrayList<>();
for (var next : myWorkChunkCaptor.getAllValues()) {
int nextSize = String.join("\n", next.getStringifiedResources()).length();
ourLog.info("Next size: {}", nextSize);
assertThat(nextSize, lessThanOrEqualTo(maxFileSize));
next.getStringifiedResources().stream()
.filter(StringUtils::isNotBlank)
.map(t->myFhirContext.newJsonParser().parseResource(t))
.map(t->new BatchResourceId().setResourceType(t.getIdElement().getResourceType()).setId(t.getIdElement().getIdPart()))
.forEach(actualResourceIdList::add);
}

Collections.sort(actualResourceIdList);
assertEquals(expectedIds, actualResourceIdList);

}

}

@ -10,14 +10,18 @@ import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TagsTest;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.util.JsonUtil;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.OrganizationAffiliation;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

@ -38,7 +42,9 @@ public class FetchResourceIdsStepJpaTest extends BaseJpaR4Test {
public void afterCleanupDao() {
super.afterCleanupDao();

myStorageSettings.setTagStorageMode(new JpaStorageSettings().getTagStorageMode());
JpaStorageSettings defaults = new JpaStorageSettings();
myStorageSettings.setTagStorageMode(defaults.getTagStorageMode());
myStorageSettings.setBulkExportFileMaximumSize(defaults.getBulkExportFileMaximumSize());
}

@Test

@ -74,6 +80,39 @@ public class FetchResourceIdsStepJpaTest extends BaseJpaR4Test {
assertEquals(10, idList.getIds().size());
}

@Test
public void testChunkMaximumSize() {
myStorageSettings.setBulkExportFileMaximumSize(500);

for (int i = 0; i < 100; i++) {
OrganizationAffiliation orgAff = new OrganizationAffiliation();
orgAff.setActive(true);
myOrganizationAffiliationDao.create(orgAff, mySrd);
}

BulkExportJobParameters params = new BulkExportJobParameters();
params.setResourceTypes(List.of("OrganizationAffiliation"));
VoidModel data = new VoidModel();
JobInstance instance = new JobInstance();
instance.setInstanceId("instance-id");
String chunkId = "chunk-id";
StepExecutionDetails<BulkExportJobParameters, VoidModel> executionDetails = new StepExecutionDetails<>(params, data, instance, chunkId);

// Test
myFetchResourceIdsStep.run(executionDetails, mySink);

// Verify
verify(mySink, Mockito.atLeast(1)).accept(myResourceIdListCaptor.capture());
List<ResourceIdList> idLists = myResourceIdListCaptor.getAllValues();
for (var next : idLists) {
String serialized = JsonUtil.serialize(next, false);

// Note that the 600 here is a bit higher than the configured maximum of 500 above,
// because our chunk size estimate is not perfectly accurate. The error stays small
// (under 100 chars) regardless of how large the maximum is.
assertThat(serialized, serialized.length(), lessThanOrEqualTo(600));
}

}

}

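The slack in the assertion above (600 vs the configured 500) exists because the chunk size is estimated from the IDs rather than measured on the final JSON, so the serialized form can come out slightly larger. A hypothetical estimator of that flavor, purely to illustrate why the estimate can land a little above the cap (not HAPI FHIR's actual logic):

import java.util.List;

// Hypothetical estimator: approximates the serialized size of an ID-list chunk
// without serializing it. The fixed per-entry overhead is a guess, which is
// exactly why a test against the real serialized form needs some tolerance.
public class ChunkSizeEstimateExample {

    static int estimateSize(List<String> theIds, String theResourceType) {
        int total = 0;
        for (String id : theIds) {
            // rough per-entry cost: the ID, the resource type, plus JSON punctuation
            total += id.length() + theResourceType.length() + 40;
        }
        return total;
    }

    public static void main(String[] args) {
        List<String> ids = List.of("1001", "1002", "1003");
        System.out.println(estimateSize(ids, "OrganizationAffiliation"));
    }
}
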
@ -5,13 +5,10 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.AuditEvent;

@ -20,7 +17,6 @@ import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Composition;
import org.hl7.fhir.r4.model.Device;
import org.hl7.fhir.r4.model.DomainResource;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.IdType;

@ -41,14 +37,13 @@ import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.sql.Date;
import java.util.ArrayList;
import java.util.List;

import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.fail;

@ -117,7 +112,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {


@Test
public void testShouldResolveATwoLinkChainWithStandAloneResourcesWithoutContainedResourceIndexing() throws Exception {
public void testShouldResolveATwoLinkChainWithStandAloneResourcesWithoutContainedResourceIndexing() {

// setup
IIdType oid1;

@ -141,7 +136,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
String url = "/Observation?subject.name=Smith";

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(1L, oids.size());

@ -149,7 +144,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveATwoLinkChainWithStandAloneResources() throws Exception {
public void testShouldResolveATwoLinkChainWithStandAloneResources() {

// setup
myStorageSettings.setIndexOnContainedResources(true);

@ -175,7 +170,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
String url = "/Observation?subject.name=Smith";

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(1L, oids.size());

@ -183,7 +178,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveATwoLinkChainWithStandAloneResources_CommonReference() throws Exception {
public void testShouldResolveATwoLinkChainWithStandAloneResources_CommonReference() {

// setup
myStorageSettings.setIndexOnContainedResources(true);

@ -218,7 +213,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {

// execute
myCaptureQueriesListener.clear();
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);
myCaptureQueriesListener.logSelectQueries();

// validate

@ -227,7 +222,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveATwoLinkChainWithStandAloneResources_CompoundReference() throws Exception {
public void testShouldResolveATwoLinkChainWithStandAloneResources_CompoundReference() {

// setup
myStorageSettings.setIndexOnContainedResources(true);

@ -265,7 +260,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {

// execute
myCaptureQueriesListener.clear();
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url, myAuditEventDao);
List<String> oids = myTestDaoSearch.searchForIds(url);
myCaptureQueriesListener.logSelectQueries();

// validate

@ -274,7 +269,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveATwoLinkChainWithContainedResources_CompoundReference() throws Exception {
public void testShouldResolveATwoLinkChainWithContainedResources_CompoundReference() {

// setup
myStorageSettings.setIndexOnContainedResources(true);

@ -313,7 +308,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {

// execute
myCaptureQueriesListener.clear();
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url, myAuditEventDao);
List<String> oids = myTestDaoSearch.searchForIds(url);
myCaptureQueriesListener.logSelectQueries();

// validate

@ -322,7 +317,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveATwoLinkChainWithAContainedResource() throws Exception {
public void testShouldResolveATwoLinkChainWithAContainedResource() {
// setup
myStorageSettings.setIndexOnContainedResources(true);


@ -355,7 +350,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
String url = "/Observation?subject.name=Smith";

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(1L, oids.size());

@ -363,7 +358,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldNotResolveATwoLinkChainWithAContainedResourceWhenContainedResourceIndexingIsTurnedOff() throws Exception {
public void testShouldNotResolveATwoLinkChainWithAContainedResourceWhenContainedResourceIndexingIsTurnedOff() {
// setup
IIdType oid1;


@ -378,24 +373,24 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
obs.setValue(new StringType("Test"));
obs.getSubject().setReference("#pat");

oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();

// Create a dummy record so that an unconstrained query doesn't pass the test due to returning the only record
// Create a dummy record so that an unconstrained query doesn't pass the test due to returning the only record
myObservationDao.create(new Observation(), mySrd);
}

String url = "/Observation?subject.name=Smith";

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(0L, oids.size());
}

@Test
@Disabled
public void testShouldResolveATwoLinkChainWithQualifiersWithAContainedResource() throws Exception {
@Disabled("Known limitation")
public void testShouldResolveATwoLinkChainWithQualifiersWithAContainedResource() {
// TODO: This test fails because of a known limitation in qualified searches over contained resources.
// Type information for intermediate resources in the chain is not being retained in the indexes.
// setup

@ -435,7 +430,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
String url = "/Observation?subject:Patient.name=Smith";

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(1L, oids.size());

@ -443,7 +438,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveATwoLinkChainToAContainedReference() throws Exception {
public void testShouldResolveATwoLinkChainToAContainedReference() {
// Adding support for this case in SMILE-3151

// setup

@ -477,7 +472,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
String url = "/Observation?subject.organization=" + orgId.getValueAsString();

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(1L, oids.size());

@ -485,7 +480,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveATwoLinkChainToAStandAloneReference() throws Exception {
public void testShouldResolveATwoLinkChainToAStandAloneReference() {
// Adding support for this case in SMILE-3151

// setup

@ -519,7 +514,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
String url = "/Observation?subject.organization=" + orgId.getValueAsString();

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(1L, oids.size());

@ -527,7 +522,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveATwoLinkChainWithAContainedResource_CommonReference() throws Exception {
public void testShouldResolveATwoLinkChainWithAContainedResource_CommonReference() {

// setup
myStorageSettings.setIndexOnContainedResources(true);

@ -558,7 +553,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {

// execute
myCaptureQueriesListener.clear();
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);
myCaptureQueriesListener.logSelectQueries();

// validate

@ -567,7 +562,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAloneWithoutContainedResourceIndexing() throws Exception {
public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAloneWithoutContainedResourceIndexing() {

// setup
IIdType oid1;

@ -611,7 +606,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
String url = "/Observation?subject.organization.name=HealthCo";

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(1L, oids.size());

@ -619,7 +614,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAlone() throws Exception {
public void testShouldResolveAThreeLinkChainWhereAllResourcesStandAlone() {

// setup
myStorageSettings.setIndexOnContainedResources(true);

@ -665,7 +660,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
String url = "/Observation?subject.organization.name=HealthCo";

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(1L, oids.size());

@ -673,7 +668,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain() throws Exception {
public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain() {
// This is the case that is most relevant to SMILE-2899

// setup

@ -706,7 +701,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
String url = "/Observation?subject.organization.name=HealthCo";

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(1L, oids.size());

@ -714,7 +709,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain_CommonReference() throws Exception {
public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheEndOfTheChain_CommonReference() {

// setup
myStorageSettings.setIndexOnContainedResources(true);

@ -750,7 +745,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {

// execute
myCaptureQueriesListener.clear();
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);
myCaptureQueriesListener.logSelectQueries();

// validate

@ -759,7 +754,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain() throws Exception {
public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain() {
// Adding support for this case in SMILE-3151

// setup

@ -792,7 +787,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
String url = "/Observation?subject.organization.name=HealthCo";

// execute
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);

// validate
assertEquals(1L, oids.size());

@ -800,7 +795,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain_CommonReference() throws Exception {
public void testShouldResolveAThreeLinkChainWithAContainedResourceAtTheBeginningOfTheChain_CommonReference() {

// setup
myStorageSettings.setIndexOnContainedResources(true);

@ -835,7 +830,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {

// execute
myCaptureQueriesListener.clear();
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
List<String> oids = myTestDaoSearch.searchForIds(url);
myCaptureQueriesListener.logSelectQueries();

// validate

@ -844,7 +839,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
}

@Test
public void testShouldNotResolveAThreeLinkChainWithAllContainedResourcesWhenRecursiveContainedIndexesAreDisabled() throws Exception {
public void testShouldNotResolveAThreeLinkChainWithAllContainedResourcesWhenRecursiveContainedIndexesAreDisabled() {

// setup
myStorageSettings.setIndexOnContainedResources(true);

@ -867,23 +862,23 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
obs.getCode().setText("Observation 1");
|
||||
obs.getSubject().setReference("#pat");
|
||||
|
||||
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
|
||||
myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
|
||||
|
||||
// Create a dummy record so that an unconstrained query doesn't pass the test due to returning the only record
|
||||
// Create a dummy record so that an unconstrained query doesn't pass the test due to returning the only record
|
||||
myObservationDao.create(new Observation(), mySrd);
|
||||
}
|
||||
|
||||
String url = "/Observation?subject.organization.name=HealthCo";
|
||||
|
||||
// execute
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
|
||||
// validate
|
||||
assertEquals(0L, oids.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAThreeLinkChainWithAllContainedResources() throws Exception {
|
||||
public void testShouldResolveAThreeLinkChainWithAllContainedResources() {
|
||||
|
||||
// setup
|
||||
myStorageSettings.setIndexOnContainedResources(true);
|
||||
|
@ -918,7 +913,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
|
||||
// execute
|
||||
myCaptureQueriesListener.clear();
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
|
||||
// validate
|
||||
|
@ -927,7 +922,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAThreeLinkChainWithQualifiersWhereAllResourcesStandAlone() throws Exception {
|
||||
public void testShouldResolveAThreeLinkChainWithQualifiersWhereAllResourcesStandAlone() {
|
||||
|
||||
// setup
|
||||
myStorageSettings.setIndexOnContainedResources(true);
|
||||
|
@ -969,7 +964,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
String url = "/Observation?subject:Patient.organization:Organization.name=HealthCo";
|
||||
|
||||
// execute
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
|
||||
// validate
|
||||
assertEquals(1L, oids.size());
|
||||
|
@ -977,7 +972,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheEndOfTheChain() throws Exception {
|
||||
public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheEndOfTheChain() {
|
||||
// This is the case that is most relevant to SMILE-2899
|
||||
|
||||
// setup
|
||||
|
@ -1025,7 +1020,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
String url = "/Observation?subject:Patient.organization:Organization.name=HealthCo";
|
||||
|
||||
// execute
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
|
||||
// validate
|
||||
assertEquals(1L, oids.size());
|
||||
|
@ -1033,7 +1028,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheBeginning() throws Exception {
|
||||
public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheBeginning() {
|
||||
// Adding support for this case in SMILE-3151
|
||||
|
||||
// setup
|
||||
|
@ -1078,7 +1073,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
|
||||
// execute
|
||||
myCaptureQueriesListener.clear();
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
|
||||
// validate
|
||||
|
@ -1087,8 +1082,8 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheBeginning_NotDistinctSourcePaths() throws Exception {
|
||||
@Disabled("Known limitation")
|
||||
public void testShouldResolveAThreeLinkChainWithQualifiersWithAContainedResourceAtTheBeginning_NotDistinctSourcePaths() {
|
||||
// TODO: This test fails because of a known limitation in qualified searches over contained resources.
|
||||
// Type information for intermediate resources in the chain is not being retained in the indexes.
|
||||
|
||||
|
@ -1136,7 +1131,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
|
||||
// execute
|
||||
myCaptureQueriesListener.clear();
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
|
||||
// validate
|
||||
|
@ -1145,8 +1140,8 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testShouldResolveAThreeLinkChainWithQualifiersWithAllContainedResources() throws Exception {
|
||||
@Disabled("Known limitation")
|
||||
public void testShouldResolveAThreeLinkChainWithQualifiersWithAllContainedResources() {
|
||||
// TODO: This test fails because of a known limitation in qualified searches over contained resources.
|
||||
// Type information for intermediate resources in the chain is not being retained in the indexes.
|
||||
|
||||
|
@ -1198,7 +1193,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
|
||||
// execute
|
||||
myCaptureQueriesListener.clear();
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
|
||||
// validate
|
||||
|
@ -1207,7 +1202,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAFourLinkChainWhereAllResourcesStandAlone() throws Exception {
|
||||
public void testShouldResolveAFourLinkChainWhereAllResourcesStandAlone() {
|
||||
|
||||
// setup
|
||||
myStorageSettings.setIndexOnContainedResources(true);
|
||||
|
@ -1244,7 +1239,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
String url = "/Observation?subject.organization.partof.name=HealthCo";
|
||||
|
||||
// execute
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
|
||||
// validate
|
||||
assertEquals(1L, oids.size());
|
||||
|
@ -1252,7 +1247,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAFourLinkChainWhereTheLastReferenceIsContained() throws Exception {
|
||||
public void testShouldResolveAFourLinkChainWhereTheLastReferenceIsContained() {
|
||||
|
||||
// setup
|
||||
myStorageSettings.setIndexOnContainedResources(true);
|
||||
|
@ -1289,7 +1284,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
String url = "/Observation?subject.organization.partof.name=HealthCo";
|
||||
|
||||
// execute
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
|
||||
// validate
|
||||
assertEquals(1L, oids.size());
|
||||
|
@ -1297,7 +1292,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAFourLinkChainWhereTheLastTwoReferencesAreContained() throws Exception {
|
||||
public void testShouldResolveAFourLinkChainWhereTheLastTwoReferencesAreContained() {
|
||||
|
||||
// setup
|
||||
myStorageSettings.setIndexOnContainedResources(true);
|
||||
|
@ -1334,7 +1329,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
String url = "/Observation?subject.organization.partof.name=HealthCo";
|
||||
|
||||
// execute
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
|
||||
// validate
|
||||
assertEquals(1L, oids.size());
|
||||
|
@ -1342,7 +1337,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAFourLinkChainWithAContainedResourceInTheMiddle() throws Exception {
|
||||
public void testShouldResolveAFourLinkChainWithAContainedResourceInTheMiddle() {
|
||||
|
||||
// setup
|
||||
myStorageSettings.setIndexOnContainedResources(true);
|
||||
|
@ -1384,7 +1379,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
|
||||
// execute
|
||||
myCaptureQueriesListener.clear();
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
|
||||
// validate
|
||||
|
@ -1393,7 +1388,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAFourLinkChainWhereTheFirstTwoReferencesAreContained() throws Exception {
|
||||
public void testShouldResolveAFourLinkChainWhereTheFirstTwoReferencesAreContained() {
|
||||
|
||||
// setup
|
||||
myStorageSettings.setIndexOnContainedResources(true);
|
||||
|
@ -1431,7 +1426,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
String url = "/Observation?subject.organization.partof.name=HealthCo";
|
||||
|
||||
// execute
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
|
||||
// validate
|
||||
assertEquals(1L, oids.size());
|
||||
|
@ -1439,7 +1434,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAFourLinkChainWhereTheFirstReferenceAndTheLastReferenceAreContained() throws Exception {
|
||||
public void testShouldResolveAFourLinkChainWhereTheFirstReferenceAndTheLastReferenceAreContained() {
|
||||
|
||||
// setup
|
||||
myStorageSettings.setIndexOnContainedResources(true);
|
||||
|
@ -1476,7 +1471,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
String url = "/Observation?subject.organization.partof.name=HealthCo";
|
||||
|
||||
// execute
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
|
||||
// validate
|
||||
assertEquals(1L, oids.size());
|
||||
|
@ -1484,7 +1479,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldResolveAFourLinkChainWhereAllReferencesAreContained() throws Exception {
|
||||
public void testShouldResolveAFourLinkChainWhereAllReferencesAreContained() {
|
||||
|
||||
// setup
|
||||
myStorageSettings.setIndexOnContainedResources(true);
|
||||
|
@ -1524,7 +1519,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
|
||||
// execute
|
||||
myCaptureQueriesListener.clear();
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
List<String> oids = myTestDaoSearch.searchForIds(url);
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
|
||||
// validate
|
||||
|
@ -1533,7 +1528,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testShouldThrowAnExceptionForAFiveLinkChain() throws Exception {
|
||||
public void testShouldThrowAnExceptionForAFiveLinkChain() {
|
||||
|
||||
// setup
|
||||
myStorageSettings.setIndexOnContainedResources(true);
|
||||
|
@ -1543,7 +1538,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
|
||||
try {
|
||||
// execute
|
||||
searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
myTestDaoSearch.searchForIds(url);
|
||||
fail("Expected an exception to be thrown");
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals(Msg.code(2007) + "The search chain subject.organization.partof.partof.name is too long. Only chains up to three references are supported.", e.getMessage());
|
||||
|
@ -1551,7 +1546,7 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testQueryStructure() throws Exception {
|
||||
public void testQueryStructure() {
|
||||
|
||||
// With indexing of contained resources turned off, we should not see UNION clauses in the query
|
||||
countUnionStatementsInGeneratedQuery("/Observation?patient.name=Smith", 0);
|
||||
|
@ -1584,16 +1579,19 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
@ParameterizedTest
|
||||
@CsvSource({
|
||||
// search url expected count
|
||||
"/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=1980-01-01, 1", // correct identifier, correct birthdate
|
||||
"/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-1, 1", // correct birthdate, correct identifier
|
||||
"/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=2000-01-01, 0", // correct identifier, incorrect birthdate
|
||||
"/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-1, 0", // incorrect birthdate, correct identifier
|
||||
"/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=1980-01-01, 0", // incorrect identifier, correct birthdate
|
||||
"/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-2, 0", // correct birthdate, incorrect identifier
|
||||
"/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=2000-01-01, 0", // incorrect identifier, incorrect birthdate
|
||||
"/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-2, 0", // incorrect birthdate, incorrect identifier
|
||||
"/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=1980-01-01, 1, correct identifier correct birthdate",
|
||||
"/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-1, 1, correct birthdate correct identifier",
|
||||
"/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=2000-01-01, 0, correct identifier incorrect birthdate",
|
||||
"/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-1, 0, incorrect birthdate correct identifier",
|
||||
"/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=1980-01-01, 0, incorrect identifier correct birthdate",
|
||||
"/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-2, 0, correct birthdate incorrect identifier",
|
||||
"/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=2000-01-01, 0, incorrect identifier incorrect birthdate",
|
||||
"/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-2, 0, incorrect birthdate incorrect identifier",
|
||||
// try sort by composition sp
|
||||
"/Bundle?composition.patient.identifier=system|value-1&_sort=composition.patient.birthdate, 1, correct identifier sort by birthdate",
|
||||
|
||||
})
|
||||
public void testMultipleChainedBundleCompositionSearchParameters(String theSearchUrl, int theExpectedCount) {
|
||||
public void testMultipleChainedBundleCompositionSearchParameters(String theSearchUrl, int theExpectedCount, String theMessage) {
|
||||
createSearchParameter("bundle-composition-patient-birthdate",
|
||||
"composition.patient.birthdate",
|
||||
"Bundle",
|
||||
|
@ -1610,8 +1608,8 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
|
||||
createDocumentBundleWithPatientDetails("1980-01-01", "system", "value-1");
|
||||
|
||||
SearchParameterMap params = myMatchUrlService.getResourceSearch(theSearchUrl).getSearchParameterMap().setLoadSynchronous(true);
|
||||
assertSearchReturns(myBundleDao, params, theExpectedCount);
|
||||
List<String> ids = myTestDaoSearch.searchForIds(theSearchUrl);
|
||||
assertThat(theMessage, ids, hasSize(theExpectedCount));
|
||||
}
|
||||
|
||||
private void createSearchParameter(String theId, String theCode, String theBase, String theExpression, Enumerations.SearchParamType theType) {
|
||||
|
@ -1651,9 +1649,9 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
assertEquals(theExpectedCount, theDao.search(theSearchParams, mySrd).size());
|
||||
}
|
||||
|
||||
private void countUnionStatementsInGeneratedQuery(String theUrl, int theExpectedNumberOfUnions) throws IOException {
|
||||
private void countUnionStatementsInGeneratedQuery(String theUrl, int theExpectedNumberOfUnions) {
|
||||
myCaptureQueriesListener.clear();
|
||||
searchAndReturnUnqualifiedVersionlessIdValues(theUrl);
|
||||
myTestDaoSearch.searchForIds(theUrl);
|
||||
List<SqlQuery> selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
|
||||
assertEquals(1, selectQueries.size());
|
||||
|
||||
|
@ -1661,18 +1659,4 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
assertEquals(theExpectedNumberOfUnions, countMatches(sqlQuery, "union"), sqlQuery);
|
||||
}
|
||||
|
||||
private List<String> searchAndReturnUnqualifiedVersionlessIdValues(String theUrl) throws IOException {
|
||||
return searchAndReturnUnqualifiedVersionlessIdValues(theUrl, myObservationDao);
|
||||
}
|
||||
|
||||
private List<String> searchAndReturnUnqualifiedVersionlessIdValues(String theUrl, IFhirResourceDao<? extends DomainResource> theObservationDao) {
|
||||
List<String> ids = new ArrayList<>();
|
||||
|
||||
ResourceSearch search = myMatchUrlService.getResourceSearch(theUrl);
|
||||
SearchParameterMap map = search.getSearchParameterMap();
|
||||
map.setLoadSynchronous(true);
|
||||
IBundleProvider result = theObservationDao.search(map);
|
||||
return result.getAllResourceIds();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
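Note: the change above replaces the file-local searchAndReturnUnqualifiedVersionlessIdValues() helper with the shared TestDaoSearch bean throughout the test class. A minimal sketch of the new idiom, assuming TestDaoSearch is autowired into the test base class as this change does; the fixture helpers (createOrganization, createPatient, createObservation and their with* arguments) are hypothetical stand-ins for the setup code elided from the hunks:

    // A hedged sketch, not an actual test from this change:
    // TestDaoSearch.searchForIds(url) runs a synchronous DAO search and
    // returns the ids of the matched resources as strings.
    @Test
    public void testChainedSearchSketch() {
        // setup -- hypothetical fixture helpers
        IIdType orgId = createOrganization(withName("HealthCo"));
        IIdType patientId = createPatient(withOrganization(orgId));
        createObservation(withSubject(patientId));

        // execute -- one call replaces the removed helper
        List<String> oids = myTestDaoSearch.searchForIds("/Observation?subject.organization.name=HealthCo");

        // validate
        assertEquals(1L, oids.size());
    }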
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;

@@ -10,13 +11,18 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Composition;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.SearchParameter;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;

@@ -28,7 +34,7 @@ import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;

- @SuppressWarnings({"unchecked", "deprecation"})
+ @SuppressWarnings({"deprecation"})
public class FhirResourceDaoR4SortTest extends BaseJpaR4Test {

private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4SortTest.class);

@@ -51,7 +57,7 @@ public class FhirResourceDaoR4SortTest extends BaseJpaR4Test {
}

@Test
- public void testSortOnId() throws Exception {
+ public void testSortOnId() {
// Numeric ID
Patient p01 = new Patient();
p01.setActive(true);

@@ -147,7 +153,7 @@ public class FhirResourceDaoR4SortTest extends BaseJpaR4Test {
}

@Test
- public void testSortOnSearchParameterWhereAllResourcesHaveAValue() throws Exception {
+ public void testSortOnSearchParameterWhereAllResourcesHaveAValue() {
Patient pBA = new Patient();
pBA.setId("BA");
pBA.setActive(true);

@@ -348,20 +354,63 @@ public class FhirResourceDaoR4SortTest extends BaseJpaR4Test {
SearchParameterMap map;
List<String> ids;

- runInTransaction(() -> {
- ourLog.info("Dates:\n * {}", myResourceIndexedSearchParamDateDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
- });
-
- map = new SearchParameterMap();
- map.setLoadSynchronous(true);
- map.add(Observation.SP_SUBJECT, new ReferenceParam("Patient", "identifier", "PCA|PCA"));
- map.setSort(new SortSpec("date").setOrder(SortOrderEnum.DESC));
- myCaptureQueriesListener.clear();
- ids = toUnqualifiedVersionlessIdValues(myObservationDao.search(map));
- ourLog.info("IDS: {}", ids);
- myCaptureQueriesListener.logSelectQueriesForCurrentThread();
- assertThat(ids.toString(), ids, contains("Observation/OBS2", "Observation/OBS1"));
+ runInTransaction(() -> ourLog.info("Dates:\n * {}", myResourceIndexedSearchParamDateDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))));
+
+ myTestDaoSearch.assertSearchFinds(
+ "chained search",
+ "Observation?subject.identifier=PCA|PCA&_sort=-date",
+ "OBS2", "OBS1"
+ );
}

+ /**
+ * Define a composition SP for document Bundles, and sort by it.
+ * The chain is referencing the Bundle contents.
+ * @see https://smilecdr.com/docs/fhir_storage_relational/chained_searches_and_sorts.html#document-and-message-search-parameters
+ */
+ @Test
+ void testSortByCompositionSP() {
+ // given
+ SearchParameter searchParameter = new SearchParameter();
+ searchParameter.setId("bundle-composition-patient-birthdate");
+ searchParameter.setCode("composition.patient.birthdate");
+ searchParameter.setName("composition.patient.birthdate");
+ searchParameter.setUrl("http://example.org/SearchParameter/bundle-composition-patient-birthdate");
+ searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
+ searchParameter.addBase("Bundle");
+ searchParameter.setType(Enumerations.SearchParamType.DATE);
+ searchParameter.setExpression("Bundle.entry.resource.ofType(Patient).birthDate");
+ doUpdateResource(searchParameter);
+
+ mySearchParamRegistry.forceRefresh();
+
+ Patient pat1 = buildResource("Patient", withId("pat1"), withBirthdate("2001-03-17"));
+ doUpdateResource(pat1);
+ Bundle pat1Bundle = buildCompositionBundle(pat1);
+ String pat1BundleId = doCreateResource(pat1Bundle).getIdPart();
+
+ Patient pat2 = buildResource("Patient", withId("pat2"), withBirthdate("2000-01-01"));
+ doUpdateResource(pat2);
+ Bundle pat2Bundle = buildCompositionBundle(pat2);
+ String pat2BundleId = doCreateResource(pat2Bundle).getIdPart();
+
+ // then
+ myTestDaoSearch.assertSearchFinds("sort by contained date",
+ "Bundle?_sort=composition.patient.birthdate", List.of(pat2BundleId, pat1BundleId));
+ myTestDaoSearch.assertSearchFinds("reverse sort by contained date",
+ "Bundle?_sort=-composition.patient.birthdate", List.of(pat1BundleId, pat2BundleId));
+ }
+
+ private static Bundle buildCompositionBundle(Patient pat11) {
+ Bundle bundle = new Bundle();
+ bundle.setType(Bundle.BundleType.DOCUMENT);
+ Composition composition = new Composition();
+ composition.setType(new CodeableConcept().addCoding(new Coding().setCode("code").setSystem("http://example.org")));
+ bundle.addEntry().setResource(composition);
+ composition.getSubject().setReference(pat11.getIdElement().getValue());
+ bundle.addEntry().setResource(pat11);
+ return bundle;
+ }

}
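Note: the assertSearchFinds call introduced above is declarative shorthand for the search map the old test built by hand. The removed block, reduced to its essentials, shows the programmatic equivalent (every class here appears in this file's imports); a sketch for comparison, not new behavior:

    // Equivalent programmatic form of "Observation?subject.identifier=PCA|PCA&_sort=-date",
    // reconstructed from the removed lines above.
    SearchParameterMap map = new SearchParameterMap();
    map.setLoadSynchronous(true);
    map.add(Observation.SP_SUBJECT, new ReferenceParam("Patient", "identifier", "PCA|PCA"));
    map.setSort(new SortSpec("date").setOrder(SortOrderEnum.DESC)); // same as &_sort=-date
    List<String> ids = toUnqualifiedVersionlessIdValues(myObservationDao.search(map));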
@@ -12,6 +12,7 @@ import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.HumanName;
import org.hl7.fhir.r4.model.Identifier;

@@ -37,6 +38,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

@@ -531,6 +533,59 @@ public class FhirPatchApplyR4Test {
assertThat(patient.getExtension().get(0).getValueAsPrimitive().getValueAsString(), is(equalTo("foo")));
}

+ @Test
+ public void testAddExtensionWithExtension() {
+ final String extensionUrl = "http://foo/fhir/extension/foo";
+ final String innerExtensionUrl = "http://foo/fhir/extension/innerExtension";
+ final String innerExtensionValue = "2021-07-24T13:23:30-04:00";
+
+ FhirPatch svc = new FhirPatch(ourCtx);
+ Patient patient = new Patient();
+
+ Parameters patch = new Parameters();
+ Parameters.ParametersParameterComponent addOperation = createPatchAddOperation("Patient", "extension", null);
+ addOperation
+ .addPart()
+ .setName("value")
+ .addPart(
+ new Parameters.ParametersParameterComponent()
+ .setName("url")
+ .setValue(new UriType(extensionUrl))
+ )
+ .addPart(
+ new Parameters.ParametersParameterComponent()
+ .setName("extension")
+ .addPart(
+ new Parameters.ParametersParameterComponent()
+ .setName("url")
+ .setValue(new UriType(innerExtensionUrl))
+ )
+ .addPart(
+ new Parameters.ParametersParameterComponent()
+ .setName("value")
+ .setValue(new DateTimeType(innerExtensionValue))
+ )
+ );
+
+ patch.addParameter(addOperation);
+
+ ourLog.info("Patch:\n{}", ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patch));
+
+ svc.apply(patient, patch);
+ ourLog.debug("Outcome:\n{}", ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient));
+
+ //Then: it adds the new extension correctly.
+ assertThat(patient.getExtension(), hasSize(1));
+ Extension extension = patient.getExtension().get(0);
+ assertThat(extension.getUrl(), is(equalTo(extensionUrl)));
+ Extension innerExtension = extension.getExtensionFirstRep();
+
+ assertThat(innerExtension, notNullValue());
+ assertThat(innerExtension.getUrl(), is(equalTo(innerExtensionUrl)));
+ assertThat(innerExtension.getValue().primitiveValue(), is(equalTo(innerExtensionValue)));
+ }
+
private Parameters.ParametersParameterComponent createPatchAddOperation(String thePath, String theName, Type theValue) {
return createPatchOperation("add", thePath, theName, theValue, null);
}
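Note: reading the nested extension back out of the patched resource mirrors the assertions above. A small sketch using the same names as the new test; getExtensionByUrl is the stock accessor on R4 elements, and this assumes exactly one extension per URL as the test sets up:

    // Sketch: verify the nested extension the patch created.
    Extension outer = patient.getExtensionByUrl("http://foo/fhir/extension/foo");
    Extension inner = outer.getExtensionByUrl("http://foo/fhir/extension/innerExtension");
    // inner.getValue() is the DateTimeType written by the inner "value" part
    assertEquals("2021-07-24T13:23:30-04:00", inner.getValue().primitiveValue());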
@@ -18,6 +18,7 @@ import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.term.ZipCollectionBuilder;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
import ca.uhn.fhir.model.api.StorageResponseCodeEnum;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;

@@ -2404,6 +2405,100 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
assertThat(idValues, hasSize(0));
}

+ @ParameterizedTest
+ @CsvSource({
+ "false,PatientWithServerGeneratedId1",
+ "true,PatientWithServerGeneratedId2"
+ })
+ public void testHistoryOnInstanceWithServerGeneratedId(boolean theInvalidateCacheBeforeHistory,
+ String thePatientFamilyName) {
+
+ Patient patient = new Patient();
+ patient.addName().setFamily(thePatientFamilyName);
+ IIdType id = myClient.create().resource(patient).execute().getId().toVersionless();
+ ourLog.info("Res ID: {}", id);
+
+ final String expectedFullUrl = myServerBase + "/Patient/" + id.getIdPart();
+
+ if (theInvalidateCacheBeforeHistory) {
+ // the reason for this test parameterization to invalidate the cache is that
+ // when a resource is created/updated, its id mapping is cached for 1 minute so
+ // retrieving the history right after creating the resource will use the cached value.
+ // By invalidating the cache here and getting the history bundle again,
+ // we test the scenario where the id mapping needs to be read from the db,
+ // hence testing a different code path.
+ myMemoryCacheService.invalidateCaches(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID);
+ }
+
+ Bundle history = myClient.history().onInstance(id.getValue()).andReturnBundle(Bundle.class).execute();
+ assertEquals(1, history.getEntry().size());
+ BundleEntryComponent historyEntry0 = history.getEntry().get(0);
+ // validate entry.fullUrl
+ assertEquals(expectedFullUrl, historyEntry0.getFullUrl());
+ //validate entry.request
+ assertEquals(HTTPVerb.POST, historyEntry0.getRequest().getMethod());
+ assertEquals("Patient/" + id.getIdPart() + "/_history/1", historyEntry0.getRequest().getUrl());
+ //validate entry.response
+ assertEquals("201 Created", historyEntry0.getResponse().getStatus());
+ assertNotNull(historyEntry0.getResponse().getEtag());
+
+ //validate patient resource details in the entry
+ Patient historyEntry0Patient = (Patient) historyEntry0.getResource();
+ assertEquals(id.withVersion("1").getValue(), historyEntry0Patient.getId());
+ assertEquals(1, historyEntry0Patient.getName().size());
+ assertEquals(thePatientFamilyName, historyEntry0Patient.getName().get(0).getFamily());
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "false,PatientWithForcedId1",
+ "true,PatientWithForcedId2"
+ })
+ public void testHistoryOnInstanceWithForcedId(boolean theInvalidateCacheBeforeHistory,
+ String thePatientFamilyName) {
+
+ final String patientForcedId = thePatientFamilyName + "-ForcedId";
+ Patient patient = new Patient();
+ patient.addName().setFamily(thePatientFamilyName);
+ patient.setId(patientForcedId);
+ IIdType id = myClient.update().resource(patient).execute().getId().toVersionless();
+ ourLog.info("Res ID: {}", id);
+ assertEquals(patientForcedId, id.getIdPart());
+
+ final String expectedFullUrl = myServerBase + "/Patient/" + id.getIdPart();
+
+ if (theInvalidateCacheBeforeHistory) {
+ // the reason for this test parameterization to invalidate the cache is that
+ // when a resource is created/updated, its id mapping is cached for 1 minute so
+ // retrieving the history right after creating the resource will use the cached value.
+ // By invalidating the cache here and getting the history bundle again,
+ // we test the scenario where the id mapping needs to be read from the db,
+ // hence testing a different code path.
+ myMemoryCacheService.invalidateCaches(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID);
+ }
+
+ Bundle history = myClient.history().onInstance(id.getValue()).andReturnBundle(Bundle.class).execute();
+ assertEquals(1, history.getEntry().size());
+ BundleEntryComponent historyEntry0 = history.getEntry().get(0);
+ // validate entry.fullUrl
+ assertEquals(expectedFullUrl, historyEntry0.getFullUrl());
+ //validate entry.request
+ assertEquals(HTTPVerb.POST, historyEntry0.getRequest().getMethod());
+ assertEquals("Patient/" + id.getIdPart() + "/_history/1", historyEntry0.getRequest().getUrl());
+ //validate entry.response
+ assertEquals("201 Created", historyEntry0.getResponse().getStatus());
+ assertNotNull(historyEntry0.getResponse().getEtag());
+
+ //validate patient resource details in the entry
+ Patient historyEntry0Patient = (Patient) historyEntry0.getResource();
+ assertEquals(id.withVersion("1").getValue(), historyEntry0Patient.getId());
+ assertEquals(1, historyEntry0Patient.getName().size());
+ assertEquals(thePatientFamilyName, historyEntry0Patient.getName().get(0).getFamily());
+ }
+
@Test
public void testHistoryWithDeletedResource() {
String methodName = "testHistoryWithDeletedResource";
@@ -7,6 +7,7 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.util.ExtensionConstants;
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.hl7.fhir.r4.model.CapabilityStatement;

@@ -15,11 +16,15 @@ import org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResource
import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.junit.jupiter.api.Assertions.assertEquals;

@@ -141,4 +146,26 @@ public class ServerR4Test extends BaseResourceProviderR4Test {

}

+ @ParameterizedTest
+ @ValueSource(strings = {"x-request-id", "X-Request-Id", "X-Request-ID", "X-REQUEST-ID"})
+ public void testXRequestIdHeaderRetainsCase(String theXRequestIdHeaderKey) throws Exception {
+ HttpGet get = new HttpGet(myServerBase + "/Patient");
+ String xRequestIdHeaderValue = "abc123";
+ get.addHeader(theXRequestIdHeaderKey, xRequestIdHeaderValue);
+
+ try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
+ assertEquals(200, response.getStatusLine().getStatusCode());
+
+ String responseContent = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
+ ourLog.debug(responseContent);
+
+ List<Header> xRequestIdHeaders = Arrays.stream(response.getAllHeaders())
+ .filter(header -> theXRequestIdHeaderKey.equals(header.getName()))
+ .toList();
+
+ assertEquals(1, xRequestIdHeaders.size());
+ assertEquals(xRequestIdHeaderValue, xRequestIdHeaders.get(0).getValue());
+ }
+ }
}
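Note: the parameterized test above pins the echo contract down end to end; a compact sketch of what it enforces, with the header casing taken from the test and the surrounding client code illustrative only:

    // Whatever casing the client sends, the server echoes the request id under
    // that exact header name rather than the canonical "X-Request-ID".
    HttpGet get = new HttpGet(myServerBase + "/Patient");
    get.addHeader("X-REQUEST-ID", "abc123");
    try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
        // exactly one matching header comes back, named "X-REQUEST-ID"
        assertEquals("abc123", response.getFirstHeader("X-REQUEST-ID").getValue());
    }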
@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.1.6-SNAPSHOT</version>
+ <version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -0,0 +1,20 @@
+ package ca.uhn.fhir.batch2.jobs.models;
+
+ import ca.uhn.fhir.util.JsonUtil;
+ import org.junit.jupiter.api.Test;
+
+ import static org.junit.jupiter.api.Assertions.*;
+
+ class BatchResourceIdTest {
+
+ @Test
+ public void testEstimateSize() {
+ BatchResourceId id = new BatchResourceId();
+ id.setId("12345");
+ id.setResourceType("Patient");
+ String serialized = JsonUtil.serialize(id, false);
+ assertEquals(serialized.length(), id.estimateSerializedSize(), serialized);
+ }
+
+ }
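Note: the new test ties estimateSerializedSize() to the length of the actual compact JSON, so the estimate can never drift from reality. A sketch of the invariant; the serialized form shown in the comment is a hypothetical illustration, since the test asserts only length equality, not field names:

    BatchResourceId id = new BatchResourceId();
    id.setResourceType("Patient");
    id.setId("12345");
    // JsonUtil.serialize(id, false) yields compact (non-pretty) JSON, e.g.
    // something like {"type":"Patient","id":"12345"} -- illustrative only.
    assertEquals(JsonUtil.serialize(id, false).length(), id.estimateSerializedSize());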
@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.1.6-SNAPSHOT</version>
+ <version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -59,6 +59,36 @@ public class WebsocketWithSubscriptionIdR5Test extends BaseSubscriptionsR5Test {
myWebsocketClientExtension.afterEach(null);
}

+ @Test
+ public void testSubscriptionMessagePayloadContentIsNull() {
+ // Given a subscription
+ Subscription subscription = new Subscription();
+ subscription.setStatus(Enumerations.SubscriptionStatusCodes.ACTIVE);
+ subscription.setContent(null);
+ subscription.setTopic("Topic/123");
+ subscription.getChannelType().setCode("websocket");
+ MethodOutcome methodOutcome = myClient.create().resource(subscription).execute();
+ String subscriptionId = methodOutcome.getId().getIdPart();
+
+ // When
+ myWebsocketClientExtension.bind(subscriptionId);
+
+ // And
+ // Trigger resource creation
+ Patient patient = new Patient();
+ patient.setActive(true);
+ myClient.create().resource(patient).execute();
+
+ // Then
+ List<String> messages = myWebsocketClientExtension.getMessages();
+ await().until(() -> !messages.isEmpty());
+
+ // Log it
+ ourLog.info("Messages: {}", messages);
+
+ // Verify a ping message shall be returned
+ Assertions.assertTrue(messages.contains("ping " + subscriptionId));
+ }
+
@Test
public void testSubscriptionMessagePayloadContentIsEmpty() {
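Note: the null-content test relies on Awaitility to bridge the asynchronous websocket delivery; the waiting pattern in isolation, using the same API as the test above:

    // Block (with Awaitility's default timeout) until the websocket client has
    // received at least one message, then assert on the ping for this subscription.
    await().until(() -> !myWebsocketClientExtension.getMessages().isEmpty());
    Assertions.assertTrue(myWebsocketClientExtension.getMessages().contains("ping " + subscriptionId));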
@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.1.6-SNAPSHOT</version>
+ <version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -42,6 +42,7 @@ import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.dao.GZipUtil;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
+ import ca.uhn.fhir.jpa.dao.TestDaoSearch;
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;

@@ -552,6 +553,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
private IInterceptorService myInterceptorService;
@Autowired(required = false)
private MdmStorageInterceptor myMdmStorageInterceptor;
+ @Autowired
+ protected TestDaoSearch myTestDaoSearch;

@RegisterExtension
private final PreventDanglingInterceptorsExtension myPreventDanglingInterceptorsExtension = new PreventDanglingInterceptorsExtension(()-> myInterceptorRegistry);
@@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl;
import ca.uhn.fhir.jpa.config.PackageLoaderConfig;
import ca.uhn.fhir.jpa.config.r4.JpaR4Config;
import ca.uhn.fhir.jpa.config.util.HapiEntityManagerFactoryUtil;
+ import ca.uhn.fhir.jpa.dao.TestDaoSearch;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
import ca.uhn.fhir.jpa.searchparam.config.NicknameServiceConfig;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;

@@ -78,7 +79,8 @@ import static org.junit.jupiter.api.Assertions.fail;
TestHSearchAddInConfig.DefaultLuceneHeap.class,
JpaBatch2Config.class,
Batch2JobsConfig.class,
- NicknameServiceConfig.class
+ NicknameServiceConfig.class,
+ TestDaoSearch.Config.class
})
public class TestR4Config {
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
- <version>7.1.6-SNAPSHOT</version>
+ <version>7.1.7-SNAPSHOT</version>

<relativePath>../pom.xml</relativePath>
</parent>
@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.1.6-SNAPSHOT</version>
+ <version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -38,8 +38,7 @@ public class CdsCrServiceMethod extends BaseCdsCrMethod implements ICdsServiceMe

@Override
public boolean isAllowAutoFhirClientPrefetch() {
- // The $apply operation will make FHIR requests for any data it needs
- // directly against the fhirServer of the ServiceRequest.
- return false;
+ // The $apply operation will NOT make FHIR requests for any data it needs.
+ return true;
}
}
@@ -51,13 +51,13 @@ import java.util.stream.Collectors;
* is complete so that other beans can use the stuff it creates.
*/
public class CdsHooksContextBooter {
- private static final Logger ourLog = LoggerFactory.getLogger(CdsHooksContextBooter.class);
- private static final String CDS_SERVICES_BEAN_NAME = "cdsServices";
- private Class<?> myDefinitionsClass;
- private AnnotationConfigApplicationContext myAppCtx;
+ protected static final Logger ourLog = LoggerFactory.getLogger(CdsHooksContextBooter.class);
+ protected static final String CDS_SERVICES_BEAN_NAME = "cdsServices";
+ protected Class<?> myDefinitionsClass;
+ protected AnnotationConfigApplicationContext myAppCtx;

- private List<Object> myCdsServiceBeans = new ArrayList<>();
- private final CdsServiceCache myCdsServiceCache = new CdsServiceCache();
+ protected List<Object> myCdsServiceBeans = new ArrayList<>();
+ protected final CdsServiceCache myCdsServiceCache = new CdsServiceCache();

public void setDefinitionsClass(Class<?> theDefinitionsClass) {
myDefinitionsClass = theDefinitionsClass;

@@ -70,7 +70,7 @@ public class CdsHooksContextBooter {
return myCdsServiceCache;
}

- private void extractCdsServices(Object theServiceBean) {
+ protected void extractCdsServices(Object theServiceBean) {
Method[] methods = theServiceBean.getClass().getMethods();
// Sort alphabetically so service list output is deterministic (to ensure GET /cds-services is idempotent).
// This also simplifies testing :-)

@@ -104,7 +104,7 @@ public class CdsHooksContextBooter {
}
}

- String validateJson(String theExtension) {
+ protected String validateJson(String theExtension) {
if (StringUtils.isEmpty(theExtension)) {
return null;
}
@@ -33,7 +33,6 @@ import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseSuggestionJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseSystemActionJson;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.CarePlan;
- import org.hl7.fhir.dstu3.model.Endpoint;
import org.hl7.fhir.dstu3.model.Extension;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.ParameterDefinition;

@@ -54,7 +53,6 @@ import java.util.List;
import java.util.Map;

import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA;
- import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA_ENDPOINT;
import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_ENCOUNTER;
import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PARAMETERS;
import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PRACTITIONER;

@@ -113,22 +111,6 @@ public class CdsCrServiceDstu3 implements ICdsCrService {
if (data.hasEntry()) {
parameters.addParameter(part(APPLY_PARAMETER_DATA, data));
}
- if (theJson.getFhirServer() != null) {
- Endpoint endpoint = new Endpoint().setAddress(theJson.getFhirServer());
- if (theJson.getServiceRequestAuthorizationJson().getAccessToken() != null) {
- String tokenType = getTokenType(theJson.getServiceRequestAuthorizationJson());
- endpoint.addHeader(String.format(
- "Authorization: %s %s",
- tokenType, theJson.getServiceRequestAuthorizationJson().getAccessToken()));
- if (theJson.getServiceRequestAuthorizationJson().getSubject() != null) {
- endpoint.addHeader(String.format(
- "%s: %s",
- myCdsConfigService.getCdsCrSettings().getClientIdHeaderName(),
- theJson.getServiceRequestAuthorizationJson().getSubject()));
- }
- }
- parameters.addParameter(part(APPLY_PARAMETER_DATA_ENDPOINT, endpoint));
- }
return parameters;
}
@@ -36,7 +36,6 @@ import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseSystemActionJson;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CanonicalType;
- import org.hl7.fhir.r4.model.Endpoint;
import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.ParameterDefinition;

@@ -56,7 +55,6 @@ import java.util.Map;
import java.util.stream.Collectors;

import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA;
- import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA_ENDPOINT;
import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_ENCOUNTER;
import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PARAMETERS;
import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PRACTITIONER;

@@ -115,22 +113,6 @@ public class CdsCrServiceR4 implements ICdsCrService {
if (data.hasEntry()) {
parameters.addParameter(part(APPLY_PARAMETER_DATA, data));
}
- if (theJson.getFhirServer() != null) {
- Endpoint endpoint = new Endpoint().setAddress(theJson.getFhirServer());
- if (theJson.getServiceRequestAuthorizationJson().getAccessToken() != null) {
- String tokenType = getTokenType(theJson.getServiceRequestAuthorizationJson());
- endpoint.addHeader(String.format(
- "Authorization: %s %s",
- tokenType, theJson.getServiceRequestAuthorizationJson().getAccessToken()));
- if (theJson.getServiceRequestAuthorizationJson().getSubject() != null) {
- endpoint.addHeader(String.format(
- "%s: %s",
- myCdsConfigService.getCdsCrSettings().getClientIdHeaderName(),
- theJson.getServiceRequestAuthorizationJson().getSubject()));
- }
- }
- parameters.addParameter(part(APPLY_PARAMETER_DATA_ENDPOINT, endpoint));
- }
return parameters;
}
@@ -36,7 +36,6 @@ import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseSystemActionJson;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.CanonicalType;
- import org.hl7.fhir.r5.model.Endpoint;
import org.hl7.fhir.r5.model.Extension;
import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.ParameterDefinition;

@@ -56,7 +55,6 @@ import java.util.Map;
import java.util.stream.Collectors;

import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA;
- import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_DATA_ENDPOINT;
import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_ENCOUNTER;
import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PARAMETERS;
import static ca.uhn.hapi.fhir.cdshooks.svc.cr.CdsCrConstants.APPLY_PARAMETER_PRACTITIONER;

@@ -115,22 +113,6 @@ public class CdsCrServiceR5 implements ICdsCrService {
if (data.hasEntry()) {
parameters.addParameter(part(APPLY_PARAMETER_DATA, data));
}
- if (theJson.getFhirServer() != null) {
- Endpoint endpoint = new Endpoint().setAddress(theJson.getFhirServer());
- if (theJson.getServiceRequestAuthorizationJson().getAccessToken() != null) {
- String tokenType = getTokenType(theJson.getServiceRequestAuthorizationJson());
- endpoint.addHeader(String.format(
- "Authorization: %s %s",
- tokenType, theJson.getServiceRequestAuthorizationJson().getAccessToken()));
- if (theJson.getServiceRequestAuthorizationJson().getSubject() != null) {
- endpoint.addHeader(String.format(
- "%s: %s",
- myCdsConfigService.getCdsCrSettings().getClientIdHeaderName(),
- theJson.getServiceRequestAuthorizationJson().getSubject()));
- }
- }
- parameters.addParameter(part(APPLY_PARAMETER_DATA_ENDPOINT, endpoint));
- }
return parameters;
}
@@ -68,13 +68,13 @@ public class CrDiscoveryElementDstu3 implements ICrDiscoveryElement {
|| p.equals("Patient?_id=Patient/{{context.patientId}}"))) {
String key = getKey(++itemNo);
service.addPrefetch(key, "Patient?_id={{context.patientId}}");
- service.addSource(key, CdsResolutionStrategyEnum.SERVICE);
+ service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT);
}

for (String item : myPrefetchUrlList) {
String key = getKey(++itemNo);
service.addPrefetch(key, item);
- service.addSource(key, CdsResolutionStrategyEnum.SERVICE);
+ service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT);
}

return service;
@@ -68,13 +68,13 @@ public class CrDiscoveryElementR4 implements ICrDiscoveryElement {
|| p.equals("Patient?_id=Patient/{{context.patientId}}"))) {
String key = getKey(++itemNo);
service.addPrefetch(key, "Patient?_id={{context.patientId}}");
- service.addSource(key, CdsResolutionStrategyEnum.NONE);
+ service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT);
}

for (String item : myPrefetchUrlList) {
String key = getKey(++itemNo);
service.addPrefetch(key, item);
- service.addSource(key, CdsResolutionStrategyEnum.NONE);
+ service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT);
}

return service;
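Note: across all three FHIR versions the discovery element now advertises every prefetch key as resolvable through the engine's own FHIR client, replacing the inconsistent SERVICE/NONE strategies. The resulting registration in isolation, using only the calls shown in these hunks:

    // After this change each prefetch entry is paired with FHIR_CLIENT resolution.
    String key = getKey(++itemNo); // key naming per getKey()
    service.addPrefetch(key, "Patient?_id={{context.patientId}}");
    service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT);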
@@ -68,13 +68,13 @@ public class CrDiscoveryElementR5 implements ICrDiscoveryElement {
|| p.equals("Patient?_id=Patient/{{context.patientId}}"))) {
String key = getKey(++itemNo);
service.addPrefetch(key, "Patient?_id={{context.patientId}}");
- service.addSource(key, CdsResolutionStrategyEnum.SERVICE);
+ service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT);
}

for (String item : myPrefetchUrlList) {
String key = getKey(++itemNo);
service.addPrefetch(key, item);
- service.addSource(key, CdsResolutionStrategyEnum.SERVICE);
+ service.addSource(key, CdsResolutionStrategyEnum.FHIR_CLIENT);
}

return service;
@@ -27,7 +27,6 @@ import ca.uhn.hapi.fhir.cdshooks.api.ICdsHooksDaoAuthorizationSvc;
import ca.uhn.hapi.fhir.cdshooks.api.ICdsServiceMethod;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceRequestJson;
- import ca.uhn.hapi.fhir.cdshooks.svc.CdsCrServiceMethod;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -57,11 +56,6 @@ public class CdsPrefetchSvc {

public void augmentRequest(CdsServiceRequestJson theCdsServiceRequestJson, ICdsServiceMethod theServiceMethod) {
CdsServiceJson serviceSpec = theServiceMethod.getCdsServiceJson();
- if (theServiceMethod instanceof CdsCrServiceMethod) {
- // CdsCrServices will retrieve data from the dao or fhir server passed in as needed,
- // checking for missing prefetch is not necessary.
- return;
- }
Set<String> missingPrefetch = findMissingPrefetch(serviceSpec, theCdsServiceRequestJson);
if (missingPrefetch.isEmpty()) {
return;
@@ -47,7 +47,7 @@ public class CdsCrServiceR4Test extends BaseCrTest {
requestDetails.setId(planDefinitionId);
final Parameters params = new CdsCrServiceR4(requestDetails, repository, myCdsConfigService).encodeParams(cdsServiceRequestJson);

- assertTrue(params.getParameter().size() == 3);
+ assertTrue(params.getParameter().size() == 2);
assertTrue(params.getParameter("parameters").hasResource());
}
@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.1.6-SNAPSHOT</version>
+ <version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.1.6-SNAPSHOT</version>
+ <version>7.1.7-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>7.1.6-SNAPSHOT</version>
+ <version>7.1.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -1325,7 +1325,14 @@ public class RestfulServer extends HttpServlet implements IRestfulServer<Servlet
}

protected void addRequestIdToResponse(ServletRequestDetails theRequestDetails, String theRequestId) {
- theRequestDetails.getResponse().addHeader(Constants.HEADER_REQUEST_ID, theRequestId);
+ String caseSensitiveRequestIdKey = Constants.HEADER_REQUEST_ID;
+ for (String key : theRequestDetails.getHeaders().keySet()) {
+ if (Constants.HEADER_REQUEST_ID.equalsIgnoreCase(key)) {
+ caseSensitiveRequestIdKey = key;
+ break;
+ }
+ }
+ theRequestDetails.getResponse().addHeader(caseSensitiveRequestIdKey, theRequestId);
}

/**
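Note: the rewritten addRequestIdToResponse scans the inbound headers for a case-insensitive match so the response echoes the caller's own spelling of X-Request-ID, falling back to the canonical constant when the client sent none. A sketch of the same lookup expressed with streams, purely illustrative of the logic rather than the committed code:

    // Equivalent lookup; behavior matches the loop above.
    String echoKey = theRequestDetails.getHeaders().keySet().stream()
        .filter(Constants.HEADER_REQUEST_ID::equalsIgnoreCase)
        .findFirst()
        .orElse(Constants.HEADER_REQUEST_ID);
    theRequestDetails.getResponse().addHeader(echoKey, theRequestId);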