Merge branch 'master' into ja_20240604_allow_disable_param

Commit c948ef4707
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -78,18 +78,6 @@
 			<groupId>com.google.code.findbugs</groupId>
 			<artifactId>annotations</artifactId>
 		</dependency>
-		<dependency>
-			<groupId>org.apache.derby</groupId>
-			<artifactId>derbyclient</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.derby</groupId>
-			<artifactId>derbynet</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.derby</groupId>
-			<artifactId>derbyclient</artifactId>
-		</dependency>
 		<dependency>
 			<groupId>org.checkerframework</groupId>
 			<artifactId>checker-compat-qual</artifactId>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -1209,12 +1209,23 @@ public interface IValidationSupport {
 	}

 	/**
-	 * See VersionSpecificWorkerContextWrapper#validateCode in hapi-fhir-validation.
-	 * <p>
-	 * If true, validation for codings will return a positive result if all codings are valid.
-	 * If false, validation for codings will return a positive result if there is any coding that is valid.
-	 *
-	 * @return if the application has configured validation to use logical AND, as opposed to logical OR, which is the default
+	 * <p>
+	 * Warning: This method's behaviour and naming is preserved for backwards compatibility, BUT the actual naming and
+	 * function are not aligned.
+	 * </p>
+	 * <p>
+	 * See VersionSpecificWorkerContextWrapper#validateCode in hapi-fhir-validation, and refer to the values below
+	 * for the behaviour associated with each value.
+	 * </p>
+	 * <p>
+	 * <ul>
+	 * <li>If <code>false</code> (default setting) the validation for codings will return a positive result only if
+	 * ALL codings are valid.</li>
+	 * <li>If <code>true</code> the validation for codings will return a positive result if ANY codings are valid.
+	 * </li>
+	 * </ul>
+	 * </p>
+	 * @return true or false depending on the desired coding validation behaviour.
 	 */
 	default boolean isEnabledValidationForCodingsLogicalAnd() {
 		return false;
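To make the documented switch concrete, here is a minimal sketch of opting into the non-default behaviour; the subclass name is hypothetical, while `isEnabledValidationForCodingsLogicalAnd()` is the hook shown in the diff above:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.DefaultProfileValidationSupport;

// Hypothetical subclass, shown only to illustrate the documented switch.
public class LenientCodingValidationSupport extends DefaultProfileValidationSupport {

    public LenientCodingValidationSupport(FhirContext theContext) {
        super(theContext);
    }

    @Override
    public boolean isEnabledValidationForCodingsLogicalAnd() {
        // Per the Javadoc above: true means ANY valid coding yields a positive
        // result, while false (the default) requires ALL codings to be valid.
        // The method name and the behaviour are deliberately not aligned, as
        // the backwards-compatibility warning explains.
        return true;
    }
}
```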
@@ -205,8 +205,6 @@ ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.invalidName=Partition name "{0}
 ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.cantCreateDuplicatePartitionName=Partition name "{0}" is already defined
 ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.cantCreateDefaultPartition=Can not create partition with name "DEFAULT"

 ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor.unknownTenantName=Unknown tenant: {0}

-ca.uhn.fhir.jpa.dao.HistoryBuilder.noSystemOrTypeHistoryForPartitionAwareServer=Type- and Server- level history operation not supported across partitions on partitioned server
-
 ca.uhn.fhir.jpa.provider.DiffProvider.cantDiffDifferentTypes=Unable to diff two resources of different types
@@ -4,7 +4,7 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>ca.uhn.hapi.fhir</groupId>
 	<artifactId>hapi-fhir-bom</artifactId>
-	<version>7.3.8-SNAPSHOT</version>
+	<version>7.3.10-SNAPSHOT</version>

 	<packaging>pom</packaging>
 	<name>HAPI FHIR BOM</name>
@@ -12,7 +12,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -84,11 +84,6 @@
 			<groupId>commons-cli</groupId>
 			<artifactId>commons-cli</artifactId>
 		</dependency>
-
-		<dependency>
-			<groupId>org.apache.derby</groupId>
-			<artifactId>derby</artifactId>
-		</dependency>
 		<dependency>
 			<groupId>com.h2database</groupId>
 			<artifactId>h2</artifactId>
@@ -6,8 +6,8 @@ import ca.uhn.fhir.system.HapiSystemProperties;
 import ca.uhn.fhir.test.utilities.RestServerR4Helper;
 import ca.uhn.fhir.test.utilities.TlsAuthenticationTestHelper;
 import ca.uhn.fhir.util.ParametersUtil;
-import ca.uhn.test.util.LogEventIterableAssert;
 import ca.uhn.test.util.LogbackTestExtension;
+import ca.uhn.test.util.LogbackTestExtensionAssert;
 import ch.qos.logback.classic.Logger;
 import org.hl7.fhir.instance.model.api.IBaseParameters;
 import org.junit.jupiter.api.BeforeEach;
@@ -30,6 +30,7 @@ import static org.mockito.Mockito.spy;
 @ExtendWith(MockitoExtension.class)
 class ReindexTerminologyCommandTest {

+	private static final String FAILURE_MESSAGE = "FAILURE";
 	private final FhirContext myContext = FhirContext.forR4();

 	@Spy
@@ -68,10 +69,9 @@ class ReindexTerminologyCommandTest {
 		);
 		runAppWithStartupHook(args, getLoggingStartupHook());

-		LogEventIterableAssert.assertThat(myAppLogCapture.getLogEvents()).hasNoFailureMessages();
+		LogbackTestExtensionAssert.assertThat(myAppLogCapture).doesNotHaveMessage(FAILURE_MESSAGE);
 	}

-
 	@ParameterizedTest
 	@ValueSource(booleans = {true, false})
 	public void testNoVersionThrows(boolean theIncludeTls) {
@@ -131,8 +131,9 @@ class ReindexTerminologyCommandTest {
 		);
 		runAppWithStartupHook(args, getLoggingStartupHook());

-		LogEventIterableAssert.assertThat(myAppLogCapture.getLogEvents()).hasAtLeastOneFailureMessage();
-		LogEventIterableAssert.assertThat(myAppLogCapture.getLogEvents()).hasAtLeastOneEventWithMessage("Internal error. Command result unknown. Check system logs for details");
+		LogbackTestExtensionAssert.assertThat(myAppLogCapture)
+				.hasMessage(FAILURE_MESSAGE)
+				.hasMessage("Internal error. Command result unknown. Check system logs for details");
 	}

 	@ParameterizedTest
@@ -155,8 +156,9 @@ class ReindexTerminologyCommandTest {
 		);
 		runAppWithStartupHook(args, getLoggingStartupHook());

-		LogEventIterableAssert.assertThat(myAppLogCapture.getLogEvents()).hasAtLeastOneFailureMessage();
-		LogEventIterableAssert.assertThat(myAppLogCapture.getLogEvents()).hasAtLeastOneEventWithMessage("Freetext service is not configured. Operation didn't run.");
+		LogbackTestExtensionAssert.assertThat(myAppLogCapture)
+				.hasMessage(FAILURE_MESSAGE)
+				.hasMessage("Freetext service is not configured. Operation didn't run.");
 	}

 	static void runAppWithStartupHook(String[] args, Consumer<BaseApp> startupHook) {
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-cli</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 6077
+title: "Previously, when an MDM rule tried to perform a phonetic match by HumanName (eg. Patient.name), a
+  `ClassCastException` was thrown. This has now been fixed."
@@ -0,0 +1,7 @@
+---
+type: add
+issue: 6008
+title: "Previously, when partitioning was enabled, the reindex job could only be run against one partition.
+  The reindex job parameters can now accept a list of partitions or all partitions (list of RequestPartitionId or RequestPartitionId.allPartitions).
+  In the future, the functionality can be extended to other bulk operations run via batch2 jobs (e.g. $delete-expunge, $export).
+  "
@@ -0,0 +1,4 @@
+---
+type: change
+issue: 6023
+title: "Previously, CDS hook extensions needed to be encoded as strings. This has been changed so that extensions are now properly provided as inline JSON."
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 6036
+jira: SMILE-8605
+title: "The maximum length of `SP_VALUE_EXACT` and `SP_VALUE_NORMALIZED` in the `HFJ_SPIDX_STRING` table has been increased from 200 to 768. This allows for longer `:contains` searches. If you have previously stored values longer than 200 characters in String search
+  parameters and want to perform a `:contains` search on the new longer maximum size, a reindex of the affected data is required."
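For context, `:contains` performs a substring match on string search parameters, so with the widened columns a request like the following (hypothetical base URL and value) can now match inside stored values longer than 200 characters:

```
GET www.example.com/fhir/Patient?name:contains=some-fairly-long-substring
```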
@@ -0,0 +1,5 @@
+---
+type: remove
+issue: 6056
+title: "The Derby JARs have been removed from this release. The older versions have multiple high-severity vulnerabilities against them, and the newer versions force usage of Java 21, which HAPI-FHIR does not yet support.
+  If you wish to continue to use Derby, you will have to provide the `derby`, `derbyclient`, `derbynet`, `derbyshared`, and `derbytools` jars on your classpath."
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 6060
+title: "The server-generated CapabilityStatement will now use the server
+  name defined by `RestfulServer#setServerName(..)` instead of the hardcoded
+  string `HAPI FHIR`. Thanks to Renaud Subiger for the pull request!"
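As a short sketch, the server name in question is the one configured through the existing `RestfulServer` API:

```java
RestfulServer server = new RestfulServer(FhirContext.forR4());
// This name is now surfaced in the generated CapabilityStatement
// instead of the hardcoded "HAPI FHIR" string.
server.setServerName("Acme FHIR Server");
```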
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 6063
+title: "Removed deprecated Thymeleaf syntax from existing templates,
+  so as to avoid deprecation warnings in logs.
+  "
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 6070
+jira: SMILE-8503
+title: "Added paging support for `$everything` operation in synchronous search mode."
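As an illustration (hypothetical base URL and resource ID), a synchronous `$everything` call can now be paged with the standard paging parameters:

```
GET www.example.com/fhir/Patient/123/$everything?_count=50&_offset=100
```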
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 6079
+jira: SMILE-8644
+title: "It is now possible to save TermConceptDesignations with values over 2,000 characters."
@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 6090
+title: "A regression caused partition resolution to fail when creating non-partitionable resources. The issue is fixed."
@@ -1,3 +1,14 @@
+## Derby JARs removed from HAPI-FHIR
+As of Derby 17, the JARs must be built from source in order to support JDK 17, as the default packages target JDK 21. Due to a [high severity vulnerability](https://github.com/hapifhir/hapi-fhir/issues/5471) in older versions, and a lack of appetite to host
+a forked packaged version of Derby, the Derby JARs have been removed from the HAPI-FHIR distribution. For those who wish to continue to use Derby, you may still do so, but the following JARs must be manually added
+to your classpath:
+
+- [derby](https://mvnrepository.com/artifact/org.apache.derby/derby)
+- [derbyclient](https://mvnrepository.com/artifact/org.apache.derby/derbyclient)
+- [derbyshared](https://mvnrepository.com/artifact/org.apache.derby/derbyshared)
+- [derbynet](https://mvnrepository.com/artifact/org.apache.derby/derbynet)
+- [derbytools](https://mvnrepository.com/artifact/org.apache.derby/derbytools)
+
 ## Possible migration errors on SQL Server (MSSQL)

 * This affects only clients running SQL Server (MSSQL) who have custom indexes on `HFJ_SPIDX` tables, which
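For readers restoring Derby, a hedged sketch of the Maven dependencies implied by the list above; the version shown is an illustrative assumption (Derby 10.16.x is the last line targeting JDK 17), and `derbyclient`, `derbynet`, and `derbyshared` follow the same pattern:

```xml
<!-- Illustrative versions only; pick ones compatible with your JDK. -->
<dependency>
    <groupId>org.apache.derby</groupId>
    <artifactId>derby</artifactId>
    <version>10.16.1.1</version>
</dependency>
<dependency>
    <groupId>org.apache.derby</groupId>
    <artifactId>derbytools</artifactId>
    <version>10.16.1.1</version>
</dependency>
```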
@@ -130,14 +130,14 @@ Once enabled, HTTP Requests to the FHIR server must include the name of the part
 POST www.example.com/fhir/Patient
 ```

-With partitioning enabled, if we were to now create a patient in the `DEFAULT` paritition, the request would now look like this:
+With partitioning enabled, if we were to now create a patient in the `P1` partition, the request would now look like this:


 ```
-POST www.example.com/fhir/DEFAULT/Patient
+POST www.example.com/fhir/P1/Patient
 ```

-Failure to add a partition name to the request path will result in an error when multitenancy is enabled.
+If a tenant name is not provided in the request path, the request will default to the 'DEFAULT' partition.

 # Limitations

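A minimal sketch (assuming the standard HAPI FHIR server APIs) of enabling URL-based tenant selection, so that the partition name can be carried in the request path as shown above:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.tenant.UrlBaseTenantIdentificationStrategy;

public class PartitionedServer extends RestfulServer {

    public PartitionedServer() {
        super(FhirContext.forR4());
        // Read the tenant/partition name from the first path segment,
        // e.g. POST /fhir/P1/Patient targets the "P1" partition.
        setTenantIdentificationStrategy(new UrlBaseTenantIdentificationStrategy());
    }
}
```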
@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.8-SNAPSHOT</version>
+		<version>7.3.10-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -19,6 +19,7 @@
  */
 package ca.uhn.fhir.jpa.batch2;

+import ca.uhn.fhir.batch2.api.IJobPartitionProvider;
 import ca.uhn.fhir.batch2.api.IJobPersistence;
 import ca.uhn.fhir.batch2.config.BaseBatch2Config;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
@@ -27,6 +28,8 @@ import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
 import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkMetadataViewRepository;
 import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
 import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
+import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
 import jakarta.persistence.EntityManager;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -52,4 +55,10 @@ public class JpaBatch2Config extends BaseBatch2Config {
 				theEntityManager,
 				theInterceptorBroadcaster);
 	}
+
+	@Bean
+	public IJobPartitionProvider jobPartitionProvider(
+			IRequestPartitionHelperSvc theRequestPartitionHelperSvc, IPartitionLookupSvc thePartitionLookupSvc) {
+		return new JpaJobPartitionProvider(theRequestPartitionHelperSvc, thePartitionLookupSvc);
+	}
 }
@@ -0,0 +1,63 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.batch2;
+
+import ca.uhn.fhir.batch2.api.IJobPartitionProvider;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.entity.PartitionEntity;
+import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * The default JPA implementation, which uses {@link IRequestPartitionHelperSvc} and {@link IPartitionLookupSvc}
+ * to compute the partition to run a batch2 job.
+ * The latter will be used to handle cases when the job is configured to run against all partitions
+ * (bulk system operation) and will return the actual list with all the configured partitions.
+ */
+public class JpaJobPartitionProvider implements IJobPartitionProvider {
+	protected final IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
+	private final IPartitionLookupSvc myPartitionLookupSvc;
+
+	public JpaJobPartitionProvider(
+			IRequestPartitionHelperSvc theRequestPartitionHelperSvc, IPartitionLookupSvc thePartitionLookupSvc) {
+		myRequestPartitionHelperSvc = theRequestPartitionHelperSvc;
+		myPartitionLookupSvc = thePartitionLookupSvc;
+	}
+
+	@Override
+	public List<RequestPartitionId> getPartitions(RequestDetails theRequestDetails, String theOperation) {
+		RequestPartitionId partitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForServerOperation(
+				theRequestDetails, theOperation);
+		if (!partitionId.isAllPartitions()) {
+			return List.of(partitionId);
+		}
+		// handle (bulk) system operations that are typically configured with RequestPartitionId.allPartitions()
+		// populate the actual list of all partitions
+		List<RequestPartitionId> partitionIdList = myPartitionLookupSvc.listPartitions().stream()
+				.map(PartitionEntity::toRequestPartitionId)
+				.collect(Collectors.toList());
+		partitionIdList.add(RequestPartitionId.defaultPartition());
+		return partitionIdList;
+	}
+}
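A brief sketch of how a caller might resolve the partitions for a batch2 job through this provider; the wiring mirrors the `jobPartitionProvider` bean above, and the operation constant is illustrative:

```java
IJobPartitionProvider provider =
        new JpaJobPartitionProvider(myRequestPartitionHelperSvc, myPartitionLookupSvc);

// For an all-partitions request this expands to every configured partition
// plus the default partition; otherwise it returns the single partition
// resolved for the request.
List<RequestPartitionId> partitions =
        provider.getPartitions(new SystemRequestDetails(), ProviderConstants.OPERATION_EXPORT);
```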
@@ -1027,19 +1027,19 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		List<TagDefinition> tags = toTagList(theMetaAdd);
 		for (TagDefinition nextDef : tags) {

-			boolean hasTag = false;
+			boolean entityHasTag = false;
 			for (BaseTag next : new ArrayList<>(theEntity.getTags())) {
 				if (Objects.equals(next.getTag().getTagType(), nextDef.getTagType())
 						&& Objects.equals(next.getTag().getSystem(), nextDef.getSystem())
 						&& Objects.equals(next.getTag().getCode(), nextDef.getCode())
 						&& Objects.equals(next.getTag().getVersion(), nextDef.getVersion())
 						&& Objects.equals(next.getTag().getUserSelected(), nextDef.getUserSelected())) {
-					hasTag = true;
+					entityHasTag = true;
 					break;
 				}
 			}

-			if (!hasTag) {
+			if (!entityHasTag) {
 				theEntity.setHasTags(true);

 				TagDefinition def = getTagOrNull(
@@ -1351,15 +1351,33 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	@Transactional
 	public <MT extends IBaseMetaType> MT metaAddOperation(
 			IIdType theResourceId, MT theMetaAdd, RequestDetails theRequest) {
+
+		RequestPartitionId requestPartitionId =
+				myRequestPartitionHelperService.determineReadPartitionForRequestForServerOperation(
+						theRequest, JpaConstants.OPERATION_META_ADD);
+
+		myTransactionService
+				.withRequest(theRequest)
+				.withRequestPartitionId(requestPartitionId)
+				.execute(() -> doMetaAddOperation(theResourceId, theMetaAdd, theRequest, requestPartitionId));
+
+		@SuppressWarnings("unchecked")
+		MT retVal = (MT) metaGetOperation(theMetaAdd.getClass(), theResourceId, theRequest);
+		return retVal;
+	}
+
+	protected <MT extends IBaseMetaType> void doMetaAddOperation(
+			IIdType theResourceId, MT theMetaAdd, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {
 		TransactionDetails transactionDetails = new TransactionDetails();

 		StopWatch w = new StopWatch();
-		BaseHasResource entity = readEntity(theResourceId, theRequest);
-		if (entity == null) {
+		BaseHasResource entity = readEntity(theResourceId, true, theRequest, theRequestPartitionId);
+
+		if (isNull(entity)) {
 			throw new ResourceNotFoundException(Msg.code(1993) + theResourceId);
 		}

-		ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest, transactionDetails);
+		ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequestPartitionId, transactionDetails);
 		if (latestVersion.getVersion() != entity.getVersion()) {
 			doMetaAdd(theMetaAdd, entity, theRequest, transactionDetails);
 		} else {
@@ -1372,27 +1390,43 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		}

 		ourLog.debug("Processed metaAddOperation on {} in {}ms", theResourceId, w.getMillisAndRestart());
-
-		@SuppressWarnings("unchecked")
-		MT retVal = (MT) metaGetOperation(theMetaAdd.getClass(), theResourceId, theRequest);
-		return retVal;
 	}

 	@Override
 	@Transactional
 	public <MT extends IBaseMetaType> MT metaDeleteOperation(
 			IIdType theResourceId, MT theMetaDel, RequestDetails theRequest) {
-		TransactionDetails transactionDetails = new TransactionDetails();
+
+		RequestPartitionId requestPartitionId =
+				myRequestPartitionHelperService.determineReadPartitionForRequestForServerOperation(
+						theRequest, JpaConstants.OPERATION_META_DELETE);
+
+		myTransactionService
+				.withRequest(theRequest)
+				.withRequestPartitionId(requestPartitionId)
+				.execute(() -> doMetaDeleteOperation(theResourceId, theMetaDel, theRequest, requestPartitionId));
+
+		@SuppressWarnings("unchecked")
+		MT retVal = (MT) metaGetOperation(theMetaDel.getClass(), theResourceId, theRequest);
+		return retVal;
+	}
+
+	@Transactional
+	public <MT extends IBaseMetaType> void doMetaDeleteOperation(
+			IIdType theResourceId, MT theMetaDel, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {
+		TransactionDetails transactionDetails = new TransactionDetails();
 		StopWatch w = new StopWatch();
-		BaseHasResource entity = readEntity(theResourceId, theRequest);
-		if (entity == null) {
+
+		BaseHasResource entity = readEntity(theResourceId, true, theRequest, theRequestPartitionId);
+
+		if (isNull(entity)) {
 			throw new ResourceNotFoundException(Msg.code(1994) + theResourceId);
 		}

-		ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest, transactionDetails);
+		ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequestPartitionId, transactionDetails);
 		boolean nonVersionedTags =
 				myStorageSettings.getTagStorageMode() != JpaStorageSettings.TagStorageModeEnum.VERSIONED;

 		if (latestVersion.getVersion() != entity.getVersion() || nonVersionedTags) {
 			doMetaDelete(theMetaDel, entity, theRequest, transactionDetails);
 		} else {
@@ -1404,10 +1438,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		}

 		ourLog.debug("Processed metaDeleteOperation on {} in {}ms", theResourceId.getValue(), w.getMillisAndRestart());
-
-		@SuppressWarnings("unchecked")
-		MT retVal = (MT) metaGetOperation(theMetaDel.getClass(), theResourceId, theRequest);
-		return retVal;
 	}

 	@Override
@@ -1825,7 +1855,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 			IIdType theId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
 		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(
 				theRequestDetails, getResourceName(), theId);
-		return readEntityLatestVersion(theId, requestPartitionId, theTransactionDetails);
+
+		return myTransactionService
+				.withRequest(theRequestDetails)
+				.withRequestPartitionId(requestPartitionId)
+				.execute(() -> readEntityLatestVersion(theId, requestPartitionId, theTransactionDetails));
 	}

 	@Nonnull
@@ -19,7 +19,6 @@
  */
 package ca.uhn.fhir.jpa.dao;

-import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient;
 import ca.uhn.fhir.jpa.api.dao.PatientEverythingParameters;
@@ -73,8 +72,7 @@ public class JpaResourceDaoPatient<T extends IBaseResource> extends BaseHapiFhir
 			paramMap.setCount(theCount.getValue());
 		}
 		if (theOffset != null) {
-			throw new IllegalArgumentException(
-					Msg.code(1106) + "Everything operation does not support offset searching");
+			paramMap.setOffset(theOffset.getValue());
 		}
 		if (theContent != null) {
 			paramMap.add(Constants.PARAM_CONTENT, theContent);
@@ -35,8 +35,10 @@ import jakarta.persistence.SequenceGenerator;
 import jakarta.persistence.Table;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hibernate.Length;

 import java.io.Serializable;
+import java.util.Objects;

 import static org.apache.commons.lang3.StringUtils.left;
 import static org.apache.commons.lang3.StringUtils.length;
@@ -82,8 +84,11 @@ public class TermConceptDesignation implements Serializable {
 	@Column(name = "USE_DISPLAY", nullable = true, length = MAX_LENGTH)
 	private String myUseDisplay;

-	@Column(name = "VAL", nullable = false, length = MAX_VAL_LENGTH)
+	@Column(name = "VAL", nullable = true, length = MAX_VAL_LENGTH)
 	private String myValue;

+	@Column(name = "VAL_VC", nullable = true, length = Length.LONG32)
+	private String myValueVc;
 	/**
 	 * TODO: Make this non-null
 	 *
@@ -144,14 +149,11 @@ public class TermConceptDesignation implements Serializable {
 	}

 	public String getValue() {
-		return myValue;
+		return Objects.nonNull(myValueVc) ? myValueVc : myValue;
 	}

-	public TermConceptDesignation setValue(@Nonnull String theValue) {
-		ValidateUtil.isNotBlankOrThrowIllegalArgument(theValue, "theValue must not be null or empty");
-		ValidateUtil.isNotTooLongOrThrowIllegalArgument(
-				theValue, MAX_VAL_LENGTH, "Value exceeds maximum length (" + MAX_VAL_LENGTH + "): " + length(theValue));
-		myValue = theValue;
+	public TermConceptDesignation setValue(@Nonnull String theValueVc) {
+		myValueVc = theValueVc;
 		return this;
 	}

@@ -439,6 +439,34 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 						t -> ResourceIndexedComboStringUnique.calculateHashComplete2(
 								t.getString("IDX_STRING")))
 				.setColumnName("HASH_COMPLETE"));
+
+		{
+			version.onTable("TRM_CONCEPT_DESIG")
+					.modifyColumn("20240705.10", "VAL")
+					.nullable()
+					.withType(ColumnTypeEnum.STRING, 2000);
+
+			version.onTable("TRM_CONCEPT_DESIG")
+					.addColumn("20240705.20", "VAL_VC")
+					.nullable()
+					.type(ColumnTypeEnum.TEXT);
+		}
+		{ // These migrations permit much longer values to be stored in SPIDX_TOKEN and SPIDX_STRING value
+			// components.
+			// This is mostly helpful for `:contains` searches on long values, since exact searches use the hash
+			// anyhow.
+			Builder.BuilderWithTableName spidxString = version.onTable("HFJ_SPIDX_STRING");
+			spidxString
+					.modifyColumn("20240708.10", "SP_VALUE_EXACT")
+					.nullable()
+					.withType(ColumnTypeEnum.STRING, 768)
+					.failureAllowed();
+			spidxString
+					.modifyColumn("20240708.20", "SP_VALUE_NORMALIZED")
+					.nullable()
+					.withType(ColumnTypeEnum.STRING, 768)
+					.failureAllowed();
+		}
 	}
 }

@@ -1485,6 +1485,11 @@ public class QueryStack {
 		mySqlBuilder.getSelect().addGroupings(firstPredicateBuilder.getResourceIdColumn());
 	}

+	public void addOrdering() {
+		BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder();
+		mySqlBuilder.getSelect().addOrderings(firstPredicateBuilder.getResourceIdColumn());
+	}
+
 	public Condition createPredicateReferenceForEmbeddedChainedSearchResource(
 			@Nullable DbColumn theSourceJoinColumn,
 			String theResourceName,
@@ -68,6 +68,7 @@ import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper;
 import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
 import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
 import ca.uhn.fhir.jpa.util.BaseIterator;
+import ca.uhn.fhir.jpa.util.CartesianProductUtil;
 import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
 import ca.uhn.fhir.jpa.util.PermutationBuilder;
 import ca.uhn.fhir.jpa.util.QueryChunker;
@@ -100,6 +101,7 @@ import ca.uhn.fhir.util.StopWatch;
 import ca.uhn.fhir.util.StringUtil;
 import ca.uhn.fhir.util.UrlUtil;
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Lists;
 import com.google.common.collect.Streams;
 import com.healthmarketscience.sqlbuilder.Condition;
 import jakarta.annotation.Nonnull;
@@ -570,8 +572,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 	 *
 	 * @param theTargetPids
 	 */
-	private void extractTargetPidsFromIdParams(
-			HashSet<Long> theTargetPids, List<ISearchQueryExecutor> theSearchQueryExecutors) {
+	private void extractTargetPidsFromIdParams(Set<Long> theTargetPids) {
 		// get all the IQueryParameterType objects
 		// for _id -> these should all be StringParam values
 		HashSet<String> ids = new HashSet<>();
@@ -600,10 +601,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		for (JpaPid pid : idToPid.values()) {
 			theTargetPids.add(pid.getId());
 		}
-
-		// add the target pids to our executors as the first
-		// results iterator to go through
-		theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(theTargetPids)));
 	}

 	private void createChunkedQuery(
@@ -615,13 +612,29 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 			RequestDetails theRequest,
 			List<Long> thePidList,
 			List<ISearchQueryExecutor> theSearchQueryExecutors) {
-		String sqlBuilderResourceName = myParams.getEverythingMode() == null ? myResourceName : null;
+		if (myParams.getEverythingMode() != null) {
+			createChunkedQueryForEverythingSearch(
+					theParams, theOffset, theMaximumResults, theCountOnlyFlag, thePidList, theSearchQueryExecutors);
+		} else {
+			createChunkedQueryNormalSearch(
+					theParams, sort, theOffset, theCountOnlyFlag, theRequest, thePidList, theSearchQueryExecutors);
+		}
+	}
+
+	private void createChunkedQueryNormalSearch(
+			SearchParameterMap theParams,
+			SortSpec sort,
+			Integer theOffset,
+			boolean theCountOnlyFlag,
+			RequestDetails theRequest,
+			List<Long> thePidList,
+			List<ISearchQueryExecutor> theSearchQueryExecutors) {
 		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
 				myContext,
 				myStorageSettings,
 				myPartitionSettings,
 				myRequestPartitionId,
-				sqlBuilderResourceName,
+				myResourceName,
 				mySqlBuilderFactory,
 				myDialectProvider,
 				theCountOnlyFlag);
@@ -639,67 +652,22 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 			}
 		}

-		JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource());
-		jdbcTemplate.setFetchSize(myFetchSize);
-		if (theMaximumResults != null) {
-			jdbcTemplate.setMaxRows(theMaximumResults);
+		/*
+		 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of
+		 * specific filters with ORs as their root from working around the natural resource type / deletion
+		 * status / partition IDs built into queries.
+		 */
+		if (theParams.containsKey(Constants.PARAM_FILTER)) {
+			Condition partitionIdPredicate = sqlBuilder
+					.getOrCreateResourceTablePredicateBuilder()
+					.createPartitionIdPredicate(myRequestPartitionId);
+			if (partitionIdPredicate != null) {
+				sqlBuilder.addPredicate(partitionIdPredicate);
+			}
 		}

-		if (myParams.getEverythingMode() != null) {
-			HashSet<Long> targetPids = new HashSet<>();
-			if (myParams.get(IAnyResource.SP_RES_ID) != null) {
-				// will add an initial search executor for
-				// _id params
-				extractTargetPidsFromIdParams(targetPids, theSearchQueryExecutors);
-			} else {
-				// For Everything queries, we make the query root by the ResourceLink table, since this query
-				// is basically a reverse-include search. For type/Everything (as opposed to instance/Everything)
-				// the one problem with this approach is that it doesn't catch Patients that have absolutely
-				// nothing linked to them. So we do one additional query to make sure we catch those too.
-				SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(
-						myContext,
-						myStorageSettings,
-						myPartitionSettings,
-						myRequestPartitionId,
-						myResourceName,
-						mySqlBuilderFactory,
-						myDialectProvider,
-						theCountOnlyFlag);
-				GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch);
-				String sql = allTargetsSql.getSql();
-				Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);
-
-				List<Long> output = jdbcTemplate.query(sql, args, new SingleColumnRowMapper<>(Long.class));
-
-				// we add a search executor to fetch unlinked patients first
-				theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output));
-			}
-
-			List<String> typeSourceResources = new ArrayList<>();
-			if (myParams.get(Constants.PARAM_TYPE) != null) {
-				typeSourceResources.addAll(extractTypeSourceResourcesFromParams());
-			}
-
-			queryStack3.addPredicateEverythingOperation(
-					myResourceName, typeSourceResources, targetPids.toArray(new Long[0]));
-		} else {
-			/*
-			 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of
-			 * specific filters with ORs as their root from working around the natural resource type / deletion
-			 * status / partition IDs built into queries.
-			 */
-			if (theParams.containsKey(Constants.PARAM_FILTER)) {
-				Condition partitionIdPredicate = sqlBuilder
-						.getOrCreateResourceTablePredicateBuilder()
-						.createPartitionIdPredicate(myRequestPartitionId);
-				if (partitionIdPredicate != null) {
-					sqlBuilder.addPredicate(partitionIdPredicate);
-				}
-			}
-
-			// Normal search
-			searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest);
-		}
+		// Normal search
+		searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest);

 		// If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the
 		// partition ID predicate in that case.
@@ -713,16 +681,10 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		}

 		// Add PID list predicate for full text search and/or lastn operation
-		if (thePidList != null && thePidList.size() > 0) {
-			sqlBuilder.addResourceIdsPredicate(thePidList);
-		}
+		addPidListPredicate(thePidList, sqlBuilder);

-		// Last updated
-		DateRangeParam lu = myParams.getLastUpdated();
-		if (lu != null && !lu.isEmpty()) {
-			Condition lastUpdatedPredicates = sqlBuilder.addPredicateLastUpdated(lu);
-			sqlBuilder.addPredicate(lastUpdatedPredicates);
-		}
+		addLastUpdatePredicate(sqlBuilder);

 		/*
 		 * Exclude the pids already in the previous iterator. This is an optimization, as opposed
@@ -769,6 +731,11 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		/*
 		 * Now perform the search
 		 */
+		executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
+	}
+
+	private void executeSearch(
+			Integer theOffset, List<ISearchQueryExecutor> theSearchQueryExecutors, SearchQueryBuilder sqlBuilder) {
 		GeneratedSql generatedSql = sqlBuilder.generate(theOffset, myMaxResultsToFetch);
 		if (!generatedSql.isMatchNothing()) {
 			SearchQueryExecutor executor =
@@ -777,6 +744,111 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		}
 	}

+	private void createChunkedQueryForEverythingSearch(
+			SearchParameterMap theParams,
+			Integer theOffset,
+			Integer theMaximumResults,
+			boolean theCountOnlyFlag,
+			List<Long> thePidList,
+			List<ISearchQueryExecutor> theSearchQueryExecutors) {
+
+		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
+				myContext,
+				myStorageSettings,
+				myPartitionSettings,
+				myRequestPartitionId,
+				null,
+				mySqlBuilderFactory,
+				myDialectProvider,
+				theCountOnlyFlag);
+
+		QueryStack queryStack3 = new QueryStack(
+				theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
+
+		JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theMaximumResults);
+
+		Set<Long> targetPids = new HashSet<>();
+		if (myParams.get(IAnyResource.SP_RES_ID) != null) {
+
+			extractTargetPidsFromIdParams(targetPids);
+
+			// add the target pids to our executors as the first
+			// results iterator to go through
+			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids)));
+		} else {
+			// For Everything queries, we make the query root by the ResourceLink table, since this query
+			// is basically a reverse-include search. For type/Everything (as opposed to instance/Everything)
+			// the one problem with this approach is that it doesn't catch Patients that have absolutely
+			// nothing linked to them. So we do one additional query to make sure we catch those too.
+			SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(
+					myContext,
+					myStorageSettings,
+					myPartitionSettings,
+					myRequestPartitionId,
+					myResourceName,
+					mySqlBuilderFactory,
+					myDialectProvider,
+					theCountOnlyFlag);
+			GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch);
+			String sql = allTargetsSql.getSql();
+			Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);
+
+			List<Long> output = jdbcTemplate.query(sql, args, new SingleColumnRowMapper<>(Long.class));
+
+			// we add a search executor to fetch unlinked patients first
+			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output));
+		}
+
+		List<String> typeSourceResources = new ArrayList<>();
+		if (myParams.get(Constants.PARAM_TYPE) != null) {
+			typeSourceResources.addAll(extractTypeSourceResourcesFromParams());
+		}
+
+		queryStack3.addPredicateEverythingOperation(
+				myResourceName, typeSourceResources, targetPids.toArray(new Long[0]));
+
+		// Add PID list predicate for full text search and/or lastn operation
+		addPidListPredicate(thePidList, sqlBuilder);
+
+		/*
+		 * If offset is present, we want to deduplicate the results by using GROUP BY;
+		 * ORDER BY is required to make sure we return unique results for each page.
+		 */
+		if (theOffset != null) {
+			queryStack3.addGrouping();
+			queryStack3.addOrdering();
+			queryStack3.setUseAggregate(true);
+		}
+
+		/*
+		 * Now perform the search
+		 */
+		executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
+	}
+
+	private void addPidListPredicate(List<Long> thePidList, SearchQueryBuilder theSqlBuilder) {
+		if (thePidList != null && !thePidList.isEmpty()) {
+			theSqlBuilder.addResourceIdsPredicate(thePidList);
+		}
+	}
+
+	private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) {
+		DateRangeParam lu = myParams.getLastUpdated();
+		if (lu != null && !lu.isEmpty()) {
+			Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu);
+			theSqlBuilder.addPredicate(lastUpdatedPredicates);
+		}
+	}
+
+	private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) {
+		JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource());
+		jdbcTemplate.setFetchSize(myFetchSize);
+		if (theMaximumResults != null) {
+			jdbcTemplate.setMaxRows(theMaximumResults);
+		}
+		return jdbcTemplate;
+	}
+
 	private Collection<String> extractTypeSourceResourcesFromParams() {

 		List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE);
@@ -1921,6 +1993,21 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		// Since we're going to remove elements below
 		theParams.values().forEach(this::ensureSubListsAreWritable);

+		/*
+		 * Apply search against the combo param index in a loop:
+		 *
+		 * 1. First we check whether the actual parameter values in the
+		 * parameter map are actually usable for searching against the combo
+		 * param index. E.g. no search modifiers, date comparators, etc.,
+		 * since these mean you can't use the combo index.
+		 *
+		 * 2. Apply and create the join SQL. We remove parameter values from
+		 * the map as we apply them, so any parameter values remaining in the
+		 * map after each loop haven't yet been factored into the SQL.
+		 *
+		 * The loop allows us to create multiple combo index joins if there
+		 * are multiple AND expressions for the related parameters.
+		 */
 		while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames)) {
 			applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam);
 		}
@@ -1940,8 +2027,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 			inputs.add(nextValues);
 		}

-		List<List<IQueryParameterType>> inputPermutations = PermutationBuilder.calculatePermutations(inputs);
-		List<String> indexStrings = new ArrayList<>(PermutationBuilder.calculatePermutationCount(inputs));
+		List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs);
+		List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs));
 		for (List<IQueryParameterType> nextPermutation : inputPermutations) {

 			StringBuilder searchStringBuilder = new StringBuilder();
|
|||
IQueryParameterType nextOr = nextPermutation.get(paramIndex);
|
||||
String nextOrValue = nextOr.getValueAsQueryToken(myContext);
|
||||
|
||||
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(
|
||||
myResourceName, nextParamName, ISearchParamRegistry.ContextEnum.SEARCH);
|
||||
RuntimeSearchParam nextParamDef =
|
||||
mySearchParamRegistry.getActiveSearchParam(myResourceName, nextParamName);
|
||||
if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) {
|
||||
if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) {
|
||||
nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue);
|
||||
|
@@ -2081,7 +2168,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 			}
 		}

-		if (PermutationBuilder.calculatePermutationCount(paramOrValues) > 500) {
+		if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) {
 			ourLog.debug(
 					"Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations");
 			paramValuesAreValidForCombo = false;
@@ -2264,13 +2351,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		// If we don't have a query yet, create one
 		if (myResultsIterator == null) {
 			if (myMaxResultsToFetch == null) {
-				if (myParams.getLoadSynchronousUpTo() != null) {
-					myMaxResultsToFetch = myParams.getLoadSynchronousUpTo();
-				} else if (myParams.getOffset() != null && myParams.getCount() != null) {
-					myMaxResultsToFetch = myParams.getCount();
-				} else {
-					myMaxResultsToFetch = myStorageSettings.getFetchSizeDefaultMaximum();
-				}
+				myMaxResultsToFetch = calculateMaxResultsToFetch();
 			}

 			/*
@@ -2296,19 +2377,12 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

 				Long nextLong = myResultsIterator.next();
 				if (myHavePerfTraceFoundIdHook) {
-					HookParams params = new HookParams()
-							.add(Integer.class, System.identityHashCode(this))
-							.add(Object.class, nextLong);
-					CompositeInterceptorBroadcaster.doCallHooks(
-							myInterceptorBroadcaster,
-							myRequest,
-							Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID,
-							params);
+					callPerformanceTracingHook(nextLong);
 				}

 				if (nextLong != null) {
 					JpaPid next = JpaPid.fromId(nextLong);
-					if (myPidSet.add(next)) {
+					if (myPidSet.add(next) && doNotSkipNextPidForEverything()) {
 						myNext = next;
 						myNonSkipCount++;
 						break;
@@ -2341,11 +2415,10 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		if (myIncludesIterator != null) {
 			while (myIncludesIterator.hasNext()) {
 				JpaPid next = myIncludesIterator.next();
-				if (next != null)
-					if (myPidSet.add(next)) {
-						myNext = next;
-						break;
-					}
+				if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) {
+					myNext = next;
+					break;
+				}
 			}
 			if (myNext == null) {
 				myNext = NO_MORE;
@@ -2384,6 +2457,30 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		}
 	}

+	private Integer calculateMaxResultsToFetch() {
+		if (myParams.getLoadSynchronousUpTo() != null) {
+			return myParams.getLoadSynchronousUpTo();
+		} else if (myParams.getOffset() != null && myParams.getCount() != null) {
+			return myParams.getEverythingMode() != null
+					? myParams.getOffset() + myParams.getCount()
+					: myParams.getCount();
+		} else {
+			return myStorageSettings.getFetchSizeDefaultMaximum();
+		}
+	}
+
+	private boolean doNotSkipNextPidForEverything() {
+		return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size()));
+	}
+
+	private void callPerformanceTracingHook(Long theNextLong) {
+		HookParams params = new HookParams()
+				.add(Integer.class, System.identityHashCode(this))
+				.add(Object.class, theNextLong);
+		CompositeInterceptorBroadcaster.doCallHooks(
+				myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params);
+	}
+
 	private void sendProcessingMsgAndFirePerformanceHook() {
 		String msg = "Pass completed with no matching results seeking rows "
 				+ myPidSet.size() + "-" + mySkipCount
@@ -2393,11 +2490,18 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 	}

 	private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) {
+		Integer offset = theOffset;
 		if (myQueryList.isEmpty()) {
 			// Capture times for Lucene/Elasticsearch queries as well
 			mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());
+
+			// setting offset to 0 to fetch all resource ids to guarantee
+			// correct output result for everything operation during paging
+			if (myParams.getEverythingMode() != null) {
+				offset = 0;
+			}
 			myQueryList = createQuery(
-					myParams, mySort, theOffset, theMaxResultsToFetch, false, myRequest, mySearchRuntimeDetails);
+					myParams, mySort, offset, theMaxResultsToFetch, false, myRequest, mySearchRuntimeDetails);
 		}

 		mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());
@@ -23,9 +23,8 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
 import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique;
 import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
-import com.healthmarketscience.sqlbuilder.BinaryCondition;
+import ca.uhn.fhir.jpa.util.QueryParameterUtils;
 import com.healthmarketscience.sqlbuilder.Condition;
-import com.healthmarketscience.sqlbuilder.InCondition;
 import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;

 import java.util.List;
@@ -48,18 +47,12 @@ public class ComboNonUniqueSearchParameterPredicateBuilder extends BaseSearchPar
 			RequestPartitionId theRequestPartitionId, List<String> theIndexStrings) {
 		PartitionablePartitionId partitionId =
 				PartitionablePartitionId.toStoragePartition(theRequestPartitionId, getPartitionSettings());
-		Condition predicate;
-		if (theIndexStrings.size() == 1) {
-			long hash = ResourceIndexedComboTokenNonUnique.calculateHashComplete(
-					getPartitionSettings(), partitionId, theIndexStrings.get(0));
-			predicate = BinaryCondition.equalTo(myColumnHashComplete, generatePlaceholder(hash));
-		} else {
-			List<Long> hashes = theIndexStrings.stream()
-					.map(t -> ResourceIndexedComboTokenNonUnique.calculateHashComplete(
-							getPartitionSettings(), partitionId, t))
-					.collect(Collectors.toList());
-			predicate = new InCondition(myColumnHashComplete, generatePlaceholders(hashes));
-		}
+		List<Long> hashes = theIndexStrings.stream()
+				.map(t -> ResourceIndexedComboTokenNonUnique.calculateHashComplete(
+						getPartitionSettings(), partitionId, t))
+				.collect(Collectors.toList());
+		Condition predicate =
+				QueryParameterUtils.toEqualToOrInPredicate(myColumnHashComplete, generatePlaceholders(hashes));
 		return combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
 	}
 }
@@ -21,9 +21,8 @@ package ca.uhn.fhir.jpa.search.builder.predicate;

 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
-import com.healthmarketscience.sqlbuilder.BinaryCondition;
+import ca.uhn.fhir.jpa.util.QueryParameterUtils;
 import com.healthmarketscience.sqlbuilder.Condition;
-import com.healthmarketscience.sqlbuilder.InCondition;
 import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;

 import java.util.List;
@@ -43,12 +42,8 @@ public class ComboUniqueSearchParameterPredicateBuilder extends BaseSearchParamP

 	public Condition createPredicateIndexString(
 			RequestPartitionId theRequestPartitionId, List<String> theIndexStrings) {
-		Condition predicate;
-		if (theIndexStrings.size() == 1) {
-			predicate = BinaryCondition.equalTo(myColumnString, generatePlaceholder(theIndexStrings.get(0)));
-		} else {
-			predicate = new InCondition(myColumnString, generatePlaceholders(theIndexStrings));
-		}
+		Condition predicate =
+				QueryParameterUtils.toEqualToOrInPredicate(myColumnString, generatePlaceholders(theIndexStrings));
 		return combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
 	}
 }
@ -0,0 +1,51 @@
|
|||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.jpa.util;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Utility class for working with cartesian products - Use Guava's
|
||||
* {@link com.google.common.collect.Lists#cartesianProduct(List)} method
|
||||
* to actually calculate the product.
|
||||
*/
|
||||
public class CartesianProductUtil {
|
||||
|
||||
/**
|
||||
* Non instantiable
|
||||
*/
|
||||
private CartesianProductUtil() {
|
||||
// nothing
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the size of the cartesian product
|
||||
*
|
||||
* @throws ArithmeticException If size exceeds {@link Integer#MAX_VALUE}
|
||||
* @since 7.4.0
|
||||
*/
|
||||
public static <T> int calculateCartesianProductSize(List<List<T>> theLists) throws ArithmeticException {
|
||||
int retVal = !theLists.isEmpty() ? 1 : 0;
|
||||
for (List<T> theList : theLists) {
|
||||
retVal = Math.multiplyExact(retVal, theList.size());
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
}
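Since this is a brand-new utility, a short usage sketch may help: the overflow guard from this change paired with the Guava call its Javadoc points at. The pairing with Guava is stated in the Javadoc above; having Guava on the classpath is assumed, and the values in the comments simply follow from the inputs:

	// Sketch only - combines the new size guard with Guava's cartesianProduct as described above.
	List<List<String>> axes = List.of(
			List.of("A0", "A1"),
			List.of("B0", "B1", "B2"));

	// fail fast (ArithmeticException) before materializing an oversized product
	int size = CartesianProductUtil.calculateCartesianProductSize(axes); // 2 * 3 = 6
	List<List<String>> product = com.google.common.collect.Lists.cartesianProduct(axes);
	// product contains [A0, B0], [A0, B1], [A0, B2], [A1, B0], ...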

@@ -0,0 +1,76 @@
package ca.uhn.fhir.jpa.batch2;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentMatchers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.ArrayList;
import java.util.List;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
public class JpaJobPartitionProviderTest {
	@Mock
	private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
	@Mock
	private IPartitionLookupSvc myPartitionLookupSvc;
	@InjectMocks
	private JpaJobPartitionProvider myJobPartitionProvider;

	@Test
	public void getPartitions_requestSpecificPartition_returnsPartition() {
		// setup
		SystemRequestDetails requestDetails = new SystemRequestDetails();
		String operation = ProviderConstants.OPERATION_EXPORT;

		RequestPartitionId partitionId = RequestPartitionId.fromPartitionId(1);
		when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForServerOperation(ArgumentMatchers.eq(requestDetails), ArgumentMatchers.eq(operation))).thenReturn(partitionId);

		// test
		List<RequestPartitionId> partitionIds = myJobPartitionProvider.getPartitions(requestDetails, operation);

		// verify
		Assertions.assertThat(partitionIds).hasSize(1);
		Assertions.assertThat(partitionIds).containsExactlyInAnyOrder(partitionId);
	}

	@Test
	public void getPartitions_requestAllPartitions_returnsListOfAllSpecificPartitions() {
		// setup
		SystemRequestDetails requestDetails = new SystemRequestDetails();
		String operation = ProviderConstants.OPERATION_EXPORT;

		when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForServerOperation(ArgumentMatchers.eq(requestDetails), ArgumentMatchers.eq(operation)))
				.thenReturn(RequestPartitionId.allPartitions());
		List<RequestPartitionId> partitionIds = List.of(RequestPartitionId.fromPartitionIds(1), RequestPartitionId.fromPartitionIds(2));

		List<PartitionEntity> partitionEntities = new ArrayList<>();
		partitionIds.forEach(partitionId -> {
			PartitionEntity entity = mock(PartitionEntity.class);
			when(entity.toRequestPartitionId()).thenReturn(partitionId);
			partitionEntities.add(entity);
		});
		when(myPartitionLookupSvc.listPartitions()).thenReturn(partitionEntities);
		List<RequestPartitionId> expectedPartitionIds = new ArrayList<>(partitionIds);
		expectedPartitionIds.add(RequestPartitionId.defaultPartition());

		// test
		List<RequestPartitionId> actualPartitionIds = myJobPartitionProvider.getPartitions(requestDetails, operation);

		// verify
		Assertions.assertThat(actualPartitionIds).hasSize(expectedPartitionIds.size());
		Assertions.assertThat(actualPartitionIds).containsExactlyInAnyOrder(expectedPartitionIds.toArray(new RequestPartitionId[0]));
	}
}
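Taken together, the two tests pin down the provider's contract: a specific partition is passed through unchanged, while an all-partitions request fans out to every configured partition plus the default one. A hedged sketch of that behavior, inferred purely from the assertions above rather than copied from the JpaJobPartitionProvider source:

	// Sketch only - inferred from the test assertions, not the class under test.
	public List<RequestPartitionId> getPartitions(RequestDetails theRequestDetails, String theOperation) {
		RequestPartitionId partitionId = myRequestPartitionHelperSvc
				.determineReadPartitionForRequestForServerOperation(theRequestDetails, theOperation);
		if (!partitionId.isAllPartitions()) {
			return List.of(partitionId); // a specific partition was requested
		}
		// all-partitions request: every known partition, then the default partition
		List<RequestPartitionId> retVal = new ArrayList<>();
		myPartitionLookupSvc.listPartitions().forEach(entity -> retVal.add(entity.toRequestPartitionId()));
		retVal.add(RequestPartitionId.defaultPartition());
		return retVal;
	}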

@@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.util;

import org.junit.jupiter.api.Test;

import java.util.List;
import java.util.stream.IntStream;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;

class CartesianProductUtilTest {

	@Test
	public void testCalculateCartesianProductSize() {
		List<List<String>> input = List.of(
				List.of("A0", "A1"),
				List.of("B0", "B1", "B2"),
				List.of("C0", "C1")
		);
		assertEquals(12, CartesianProductUtil.calculateCartesianProductSize(input));
	}

	@Test
	public void testCalculateCartesianProductSizeEmpty() {
		List<List<String>> input = List.of();
		assertEquals(0, CartesianProductUtil.calculateCartesianProductSize(input));
	}

	@Test
	public void testCalculateCartesianProductSizeOverflow() {
		List<Integer> ranges = IntStream.range(0, 10001).boxed().toList();
		List<List<Integer>> input = IntStream.range(0, 20).boxed().map(t -> ranges).toList();
		assertThrows(ArithmeticException.class, () -> CartesianProductUtil.calculateCartesianProductSize(input));
	}

}
@@ -6,7 +6,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.3.8-SNAPSHOT</version>
		<version>7.3.10-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -67,10 +67,6 @@
			<groupId>com.h2database</groupId>
			<artifactId>h2</artifactId>
		</dependency>
		<dependency>
			<groupId>org.apache.derby</groupId>
			<artifactId>derby</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-test</artifactId>
@@ -27,9 +27,7 @@ import ca.uhn.fhir.jpa.search.BaseSourceSearchParameterTestCases;
import ca.uhn.fhir.jpa.search.CompositeSearchParameterTestCases;
import ca.uhn.fhir.jpa.search.QuantitySearchParameterTestCases;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchRestClientFactory;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;

@@ -60,17 +58,11 @@ import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
import ca.uhn.test.util.LogbackTestExtension;
import ca.uhn.test.util.LogbackTestExtensionAssert;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.spi.ILoggingEvent;
import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch.core.SearchRequest;
import co.elastic.clients.elasticsearch.core.SearchResponse;
import com.fasterxml.jackson.databind.node.ObjectNode;
import jakarta.annotation.Nonnull;
import jakarta.json.JsonValue;
import jakarta.persistence.EntityManager;
import org.apache.commons.lang3.RandomStringUtils;
import org.elasticsearch.client.RequestOptions;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

@@ -91,6 +83,8 @@ import org.hl7.fhir.r4.model.Questionnaire;
import org.hl7.fhir.r4.model.QuestionnaireResponse;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.RiskAssessment;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.ServiceRequest;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.AfterEach;
@@ -222,6 +216,12 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
	@Qualifier("myQuestionnaireResponseDaoR4")
	private IFhirResourceDao<QuestionnaireResponse> myQuestionnaireResponseDao;
	@Autowired
	@Qualifier("myServiceRequestDaoR4")
	private IFhirResourceDao<ServiceRequest> myServiceRequestDao;
	@Autowired
	@Qualifier("mySearchParameterDaoR4")
	private IFhirResourceDao<SearchParameter> mySearchParameterDao;
	@Autowired
	private TestHSearchEventDispatcher myHSearchEventDispatcher;
	@Autowired
	ElasticsearchContainer myElasticsearchContainer;

@@ -274,7 +274,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl

	class ElasticPerformanceTracingInterceptor {
		private List<StorageProcessingMessage> messages = new ArrayList<>();
		private final List<StorageProcessingMessage> messages = new ArrayList<>();

		@Hook(Pointcut.JPA_PERFTRACE_INFO)
		public void logPerformance(StorageProcessingMessage theMessage) {

@@ -285,6 +285,8 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
			return messages;
		}
	}

	@Test
	public void testFullTextSearchesArePerformanceLogged() {

@@ -945,9 +947,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
		assertThat(result).hasSize(1);
		assertEquals(((Observation) result.get(0)).getIdElement().getIdPart(), id1.getIdPart());

		List<ILoggingEvent> events = myLogbackTestExtension.filterLoggingEventsWithPredicate(e -> e.getLevel() == Level.WARN);
		assertFalse(events.isEmpty());
		assertTrue(events.stream().anyMatch(e -> e.getFormattedMessage().contains("Some resources were not found in index. Make sure all resources were indexed. Resorting to database search.")));
		LogbackTestExtensionAssert.assertThat(myLogbackTestExtension).hasWarnMessage("Some resources were not found in index. Make sure all resources were indexed. Resorting to database search.");

		// restore changed property
		JpaStorageSettings defaultConfig = new JpaStorageSettings();

@@ -1756,7 +1756,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
	public class LastUpdatedTests {

		private String myOldObsId, myNewObsId;
		private String myOldLastUpdatedDateTime = "2017-03-24T03:21:47";
		private final String myOldLastUpdatedDateTime = "2017-03-24T03:21:47";

		@BeforeEach
		public void enableResourceStorage() {
@@ -3,7 +3,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.3.8-SNAPSHOT</version>
		<version>7.3.10-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -3,7 +3,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.3.8-SNAPSHOT</version>
		<version>7.3.10-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -6,7 +6,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.3.8-SNAPSHOT</version>
		<version>7.3.10-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -5,7 +5,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.3.8-SNAPSHOT</version>
		<version>7.3.10-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -73,7 +73,7 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
	/*
	 * Note that MYSQL chokes on unique indexes for lengths > 255 so be careful here
	 */
	public static final int MAX_LENGTH = 200;
	public static final int MAX_LENGTH = 768;
	public static final int HASH_PREFIX_LENGTH = 1;
	private static final long serialVersionUID = 1L;
	public static final String HFJ_SPIDX_STRING = "HFJ_SPIDX_STRING";

@@ -5,7 +5,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.3.8-SNAPSHOT</version>
		<version>7.3.10-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>
@@ -1,3 +1,22 @@
/*-
 * #%L
 * HAPI FHIR JPA - Search Parameters
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.searchparam.extractor;

import ca.uhn.fhir.context.FhirContext;

@@ -5,7 +5,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.3.8-SNAPSHOT</version>
		<version>7.3.10-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>
@@ -13,9 +13,8 @@ import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.subscription.SubscriptionConstants;
import ca.uhn.test.util.LogEventIterableAssert;
import ca.uhn.test.util.LogbackTestExtension;
import ch.qos.logback.classic.Level;
import ca.uhn.test.util.LogbackTestExtensionAssert;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Subscription;
import org.junit.jupiter.api.BeforeEach;

@@ -125,10 +124,8 @@ public class SubscriptionLoaderTest {
		verify(mySubscriptionDao)
				.searchForResources(any(SearchParameterMap.class), any(SystemRequestDetails.class));

		String expected = "Subscription "
				+ subscription.getIdElement().getIdPart()
				+ " could not be activated.";
		LogEventIterableAssert.assertThat(myLogCapture.getLogEvents()).hasEventWithLevelAndMessageContains(Level.ERROR, expected);
		LogEventIterableAssert.assertThat(myLogCapture.getLogEvents()).hasAtLeastOneEventWithMessage(subscription.getError());
		LogbackTestExtensionAssert.assertThat(myLogCapture).hasErrorMessage(
				"Subscription " + subscription.getIdElement().getIdPart() + " could not be activated.");
		LogbackTestExtensionAssert.assertThat(myLogCapture).hasMessage(subscription.getError());
	}
}
@@ -6,7 +6,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.3.8-SNAPSHOT</version>
		<version>7.3.10-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -1437,7 +1437,7 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
	@Test
	public void testSearchStringParamReallyLong() {
		String methodName = "testSearchStringParamReallyLong";
		String value = StringUtils.rightPad(methodName, 200, 'a');
		String value = StringUtils.rightPad(methodName, ResourceIndexedSearchParamString.MAX_LENGTH, 'a');

		IIdType longId;
		IIdType shortId;

@@ -2645,7 +2645,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
		myStorageSettings.setAdvancedHSearchIndexing(false);

		Organization org = new Organization();
		String str = "testStringParamLong__lvdaoy843s89tll8gvs89l4s3gelrukveilufyebrew8r87bv4b77feli7fsl4lv3vb7rexloxe7olb48vov4o78ls7bvo7vb48o48l4bb7vbvx";
		String str = "testStringParamLong__" + RandomStringUtils.randomAlphabetic(ResourceIndexedSearchParamString.MAX_LENGTH);
		str = str + str;
		org.getNameElement().setValue(str);

@@ -2764,8 +2764,8 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
		org.getNameElement().setValue("testTokenParamWhichIsTooLong");
		org.getType().addCoding().setSystem(longStr1).setCode(longStr2);

		String subStr1 = longStr1.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
		String subStr2 = longStr2.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
		String subStr1 = longStr1.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
		String subStr2 = longStr2.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
		List<IResourcePersistentId> val = myOrganizationDao.searchForIds(new SearchParameterMap("type", new IdentifierDt(subStr1, subStr2)), null);
		int initial = val.size();
@@ -6,7 +6,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.3.8-SNAPSHOT</version>
		<version>7.3.10-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -2023,7 +2023,7 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
	@Test
	public void testSearchStringParamReallyLong() {
		String methodName = "testSearchStringParamReallyLong";
		String value = StringUtils.rightPad(methodName, 200, 'a');
		String value = StringUtils.rightPad(methodName, ResourceIndexedSearchParamString.MAX_LENGTH, 'a');

		IIdType longId;
		IIdType shortId;

@@ -3236,7 +3236,7 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
	public void testStringParamWhichIsTooLong() {

		Organization org = new Organization();
		String str = "testStringParamLong__lvdaoy843s89tll8gvs89l4s3gelrukveilufyebrew8r87bv4b77feli7fsl4lv3vb7rexloxe7olb48vov4o78ls7bvo7vb48o48l4bb7vbvx";
		String str = "testStringParamLong__" + RandomStringUtils.randomAlphanumeric(ResourceIndexedSearchParamString.MAX_LENGTH);
		str = str + str;
		org.getNameElement().setValue(str);

@@ -3419,8 +3419,8 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
		org.getNameElement().setValue("testTokenParamWhichIsTooLong");
		org.addType().addCoding().setSystem(longStr1).setCode(longStr2);

		String subStr1 = longStr1.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
		String subStr2 = longStr2.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
		String subStr1 = longStr1.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
		String subStr2 = longStr2.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
		List<IResourcePersistentId> val = myOrganizationDao.searchForIds(new SearchParameterMap("type", new TokenParam(subStr1, subStr2)), null);
		int initial = val.size();
@@ -6,7 +6,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.3.8-SNAPSHOT</version>
		<version>7.3.10-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -1,8 +1,5 @@
package ca.uhn.fhir.jpa.bulk.imprt2;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.assertFalse;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.jobs.imprt.ConsumeFilesStep;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;

@@ -24,6 +21,9 @@ import java.util.List;
import java.util.stream.Collectors;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

@TestMethodOrder(MethodOrderer.MethodName.class)

@@ -141,15 +141,17 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test {

		// Validate

		if (partitionEnabled) {
			assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
		} else {
			assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
		}
		int expectedSelectQueryCount = partitionEnabled ? 8 : 6;
		assertEquals(expectedSelectQueryCount, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
		assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
		assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
		assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
		assertEquals(1, myCaptureQueriesListener.countCommits());

		// PartitionLookupSvcImpl#lookupPartitionByName generates one additional commit
		// because it executes in a transaction (calls executeInTransaction);
		// we may want to change that in the future
		int expectedCommitCount = partitionEnabled ? 2 : 1;
		assertEquals(expectedCommitCount, myCaptureQueriesListener.countCommits());
		assertEquals(0, myCaptureQueriesListener.countRollbacks());

		patient = myPatientDao.read(new IdType("Patient/A"), mySrd);
@@ -232,6 +232,9 @@ class BaseHapiFhirResourceDaoTest {
		entity.setId(123L);
		entity.setFhirId("456");

		// set a transactionService that will actually execute the callback
		mySvc.setTransactionService(new MockHapiTransactionService());

		// mock
		when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForRead(
				any(RequestDetails.class),

@@ -5,6 +5,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.entity.PartitionEntity;

@@ -277,7 +278,8 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
	}

	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
	public RequestPartitionId partitionIdentifyRead(ServletRequestDetails theRequestDetails) {
	public RequestPartitionId partitionIdentifyRead(ServletRequestDetails theRequestDetails,
			ReadPartitionIdRequestDetails theDetails) {

		// Just to be nice, figure out the first line in the stack that isn't a part of the
		// partitioning or interceptor infrastructure, just so it's obvious who is asking
@@ -11,6 +11,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;

@@ -21,6 +22,7 @@ import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;

@@ -581,6 +583,40 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
		assertEquals(RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, sp.getStatus());
	}

	@Test
	public void testSearchParamStringOnExtensionForVeryLongContainsSearch() {
		myStorageSettings.setAllowContainsSearches(true);
		String body = "{\n" +
			" \"resourceType\": \"SearchParameter\",\n" +
			" \"url\": \"https://health.gov.on.ca/idms/fhir/SearchParameter/ServiceRequest-Indication\",\n" +
			" \"title\": \"ServiceRequest Indication\",\n" +
			" \"status\": \"active\",\n" +
			" \"publisher\": \"MOH-IDMS\",\n" +
			" \"code\": \"ServiceRequestIndication\",\n" +
			" \"base\": [\n" +
			" \"ServiceRequest\"\n" +
			" ],\n" +
			" \"type\": \"string\",\n" +
			" \"expression\": \"ServiceRequest.extension('https://health.gov.on.ca/idms/fhir/StructureDefinition/Extension-Indication')\"\n" +
			"}";
		SearchParameter searchParameter = myFhirContext.newJsonParser().parseResource(SearchParameter.class, body);

		mySearchParameterDao.create(searchParameter, mySrd);
		mySearchParamRegistry.forceRefresh();

		ServiceRequest sr = new ServiceRequest();
		sr.addExtension().setUrl("https://health.gov.on.ca/idms/fhir/StructureDefinition/Extension-Indication").setValue(new StringType("Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Aenean commodo ligula eget dolor. Aenean massa. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Donec quam felis, ultricies nec, pellentesque eu, pretium quis, sem. Nulla consequat massa quis enim. Donec pede justo, fringilla vel, aliquet nec, vulputate eget, arcu. In enim justo, rhoncus ut, imperdiet a, venenatis vitae, justo. Nullam dictum felis eu pede mollis pretium. Integer tincidunt. Cras dapib"));

		myServiceRequestDao.create(sr, mySrd);

		SearchParameterMap searchParameter1 = new SearchParameterMap();
		searchParameter1.add("ServiceRequestIndication", new StringParam("Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Aenean commodo ligula eget dolor. Aenean massa. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Donec quam felis, ultricies nec, pellentesque eu, pretium quis, sem. Nulla consequat massa quis enim. Donec pede justo, fringilla vel, aliquet nec, vulputate eget, arcu. In enim justo, rhoncus ut, imperdiet a, venenatis vitae, justo. Nullam dictum felis eu pede mollis pretium. Integer tincidunt. Cras dapib").setContains(true));
		IBundleProvider search = myServiceRequestDao.search(searchParameter1, mySrd);
		assertThat(search.size()).isEqualTo(1);

		myStorageSettings.setAllowContainsSearches(new StorageSettings().isAllowContainsSearches());
	}

	@Test
	public void testOverrideAndDisableBuiltInSearchParametersWithOverridingDisabled() {
@@ -3723,7 +3723,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
	@Test
	public void testSearchStringParamReallyLong() {
		String methodName = "testSearchStringParamReallyLong";
		String value = StringUtils.rightPad(methodName, 200, 'a');
		String value = StringUtils.rightPad(methodName, ResourceIndexedSearchParamString.MAX_LENGTH, 'a');

		IIdType longId;
		IIdType shortId;

@@ -6,6 +6,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;

@@ -48,8 +49,10 @@ import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

@@ -1374,6 +1377,27 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {

	}

	@Test
	void termConceptDesignationOver2000CharVal() {
		final String stringWith8000Chars = IntStream.range(0, 8000)
			.mapToObj(anInt -> "B")
			.collect(Collectors.joining());

		final TermConceptDesignation termConceptDesignation8000Chars = new TermConceptDesignation()
			.setValue(stringWith8000Chars);
		myTermConceptDesignationDao.save(termConceptDesignation8000Chars);

		final List<TermConceptDesignation> allTermConceptDesignations = myTermConceptDesignationDao.findAll();

		assertThat(allTermConceptDesignations)
			.hasSize(1);

		final TermConceptDesignation termConceptDesignationFromDb = allTermConceptDesignations.get(0);

		assertThat(termConceptDesignationFromDb.getValue())
			.isEqualTo(stringWith8000Chars);
	}

	private ArrayList<String> toCodesContains(List<ValueSetExpansionContainsComponent> theContains) {
		ArrayList<String> retVal = new ArrayList<>();
		for (ValueSetExpansionContainsComponent next : theContains) {
@@ -3954,7 +3954,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
	public void testStringParamWhichIsTooLong() {

		Organization org = new Organization();
		String str = "testStringParamLong__lvdaoy843s89tll8gvs89l4s3gelrukveilufyebrew8r87bv4b77feli7fsl4lv3vb7rexloxe7olb48vov4o78ls7bvo7vb48o48l4bb7vbvx";
		String str = "testStringParamLong__" + RandomStringUtils.randomAlphanumeric(ResourceIndexedSearchParamString.MAX_LENGTH);
		str = str + str;
		org.getNameElement().setValue(str);

@@ -4137,8 +4137,8 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
		org.getNameElement().setValue("testTokenParamWhichIsTooLong");
		org.addType().addCoding().setSystem(longStr1).setCode(longStr2);

		String subStr1 = longStr1.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
		String subStr2 = longStr2.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
		String subStr1 = longStr1.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
		String subStr2 = longStr2.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
		List<IResourcePersistentId> val = myOrganizationDao.searchForIds(new SearchParameterMap("type", new TokenParam(subStr1, subStr2)), null);
		int initial = val.size();
@@ -32,8 +32,8 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.validation.IValidatorModule;
import ca.uhn.test.util.LogbackTestExtensionAssert;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.spi.ILoggingEvent;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport;
import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport;

@@ -103,6 +103,7 @@ import static org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTermi
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

@@ -1514,7 +1515,7 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {

		// validator
		assertNotNull(outcome);
		assertTrue(outcome.getOperationOutcome() instanceof OperationOutcome);
		assertInstanceOf(OperationOutcome.class, outcome.getOperationOutcome());
		List<OperationOutcome.OperationOutcomeIssueComponent> issues = ((OperationOutcome) outcome.getOperationOutcome()).getIssue();
		assertFalse(issues.isEmpty());
		List<OperationOutcome.OperationOutcomeIssueComponent> errors = issues.stream()

@@ -1523,12 +1524,7 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
		// we have errors
		assertFalse(errors.isEmpty());

		List<ILoggingEvent> events = myLogbackTestExtension.filterLoggingEventsWithPredicate(e -> {
			return e.getLevel() == Level.WARN;
		});
		// and we have warning logs
		assertFalse(events.isEmpty());
		assertTrue(events.stream().anyMatch(e -> e.getFormattedMessage().contains("Unrecognized profile uri")));
		LogbackTestExtensionAssert.assertThat(myLogbackTestExtension).hasWarnMessage("Unrecognized profile uri");
	}

	@Test

@@ -1654,7 +1650,7 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {

		// verify
		assertNotNull(outcome);
		assertTrue(outcome.getOperationOutcome() instanceof OperationOutcome);
		assertInstanceOf(OperationOutcome.class, outcome.getOperationOutcome());
		List<OperationOutcome.OperationOutcomeIssueComponent> issues = ((OperationOutcome) outcome.getOperationOutcome()).getIssue();
		assertFalse(issues.isEmpty());
		List<OperationOutcome.OperationOutcomeIssueComponent> errors = issues.stream()
@@ -15,6 +15,8 @@ import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.mockito.ArgumentMatchers.any;

@@ -40,9 +42,10 @@ public class ReindexStepTest {
	public void testMethodReindex_withRequestPartitionId_willExecuteWithPartitionId(){
		// given
		Integer expectedPartitionId = 1;
		ResourceIdListWorkChunkJson data = new ResourceIdListWorkChunkJson();
		RequestPartitionId partitionId = RequestPartitionId.fromPartitionId(expectedPartitionId);
		ResourceIdListWorkChunkJson data = new ResourceIdListWorkChunkJson(List.of(), partitionId);
		ReindexJobParameters reindexJobParameters = new ReindexJobParameters();
		reindexJobParameters.setRequestPartitionId(RequestPartitionId.fromPartitionId(expectedPartitionId));
		reindexJobParameters.setRequestPartitionId(partitionId);
		when(myHapiTransactionService.withRequest(any())).thenCallRealMethod();
		when(myHapiTransactionService.buildExecutionBuilder(any())).thenCallRealMethod();

@@ -20,8 +20,8 @@ import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

public class DeleteExpungeJobTest extends BaseJpaR4Test {
	@Autowired

@@ -62,7 +62,8 @@ public class DeleteExpungeJobTest extends BaseJpaR4Test {
		assertEquals(2, myDiagnosticReportDao.search(SearchParameterMap.newSynchronous()).size());

		DeleteExpungeJobParameters jobParameters = new DeleteExpungeJobParameters();
		jobParameters.addUrl("Observation?subject.active=false").addUrl("DiagnosticReport?subject.active=false");
		jobParameters.addUrl("Observation?subject.active=false");
		jobParameters.addUrl("DiagnosticReport?subject.active=false");

		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
		startRequest.setParameters(jobParameters);
@@ -311,6 +311,29 @@ public class ReindexJobTest extends BaseJpaR4Test {
		myStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(reindexPropertyCache);
	}

	@Test
	public void testReindex_byMultipleUrls_indexesMatchingResources() {
		// setup
		createObservation(withStatus(Observation.ObservationStatus.FINAL.toCode()));
		createObservation(withStatus(Observation.ObservationStatus.CANCELLED.toCode()));
		createPatient(withActiveTrue());
		createPatient(withActiveFalse());

		// Only reindex a subset of them: the final Observation plus both Patients
		ReindexJobParameters parameters = new ReindexJobParameters();
		parameters.addUrl("Observation?status=final");
		parameters.addUrl("Patient?");

		// execute
		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
		startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
		startRequest.setParameters(parameters);
		Batch2JobStartResponse res = myJobCoordinator.startInstance(startRequest);
		JobInstance jobInstance = myBatch2JobHelper.awaitJobCompletion(res);

		assertThat(jobInstance.getCombinedRecordsProcessed()).isEqualTo(3);
	}

	@Test
	public void testReindexDeletedResources_byUrl_willRemoveDeletedResourceEntriesFromIndexTables(){
		IIdType obsId = myReindexTestHelper.createObservationWithAlleleExtension(Observation.ObservationStatus.FINAL);
@@ -0,0 +1,116 @@
package ca.uhn.fhir.jpa.delete.job;

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.parameters.JobParameters;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;
import java.util.stream.Stream;

import static org.assertj.core.api.Assertions.assertThat;

@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class ReindexJobWithPartitioningTest extends BaseJpaR4Test {
	@Autowired
	private IJobCoordinator myJobCoordinator;
	private final RequestTenantPartitionInterceptor myPartitionInterceptor = new RequestTenantPartitionInterceptor();

	@BeforeEach
	public void before() {
		myInterceptorRegistry.registerInterceptor(myPartitionInterceptor);
		myPartitionSettings.setPartitioningEnabled(true);

		myPartitionConfigSvc.createPartition(new PartitionEntity().setId(1).setName("TestPartition1"), null);
		myPartitionConfigSvc.createPartition(new PartitionEntity().setId(2).setName("TestPartition2"), null);

		RequestPartitionId partition1 = RequestPartitionId.fromPartitionId(1);
		RequestPartitionId partition2 = RequestPartitionId.fromPartitionId(2);
		RequestPartitionId defaultPartition = RequestPartitionId.defaultPartition();

		Observation observation1 = buildResource("Observation", withStatus(Observation.ObservationStatus.FINAL.toCode()));
		myObservationDao.create(observation1, new SystemRequestDetails().setRequestPartitionId(partition1));
		Observation observation2 = buildResource("Observation", withStatus(Observation.ObservationStatus.REGISTERED.toCode()));
		myObservationDao.create(observation2, new SystemRequestDetails().setRequestPartitionId(partition1));
		Observation observation3 = buildResource("Observation", withStatus(Observation.ObservationStatus.FINAL.toCode()));
		myObservationDao.create(observation3, new SystemRequestDetails().setRequestPartitionId(partition2));

		Patient patient1 = buildResource("Patient", withActiveTrue());
		myPatientDao.create(patient1, new SystemRequestDetails().setRequestPartitionId(partition1));
		Patient patient2 = buildResource("Patient", withActiveFalse());
		myPatientDao.create(patient2, new SystemRequestDetails().setRequestPartitionId(partition2));
		Patient patient3 = buildResource("Patient", withActiveFalse());
		myPatientDao.create(patient3, new SystemRequestDetails().setRequestPartitionId(defaultPartition));
	}

	@AfterEach
	public void after() {
		myInterceptorRegistry.unregisterInterceptor(myPartitionInterceptor);
		myPartitionSettings.setPartitioningEnabled(new PartitionSettings().isPartitioningEnabled());
	}

	public static Stream<Arguments> getReindexParameters() {
		List<RequestPartitionId> twoPartitions = List.of(RequestPartitionId.fromPartitionId(1), RequestPartitionId.fromPartitionId(2));
		List<RequestPartitionId> partition1 = List.of(RequestPartitionId.fromPartitionId(1));
		List<RequestPartitionId> allPartitions = List.of(RequestPartitionId.allPartitions());
		return Stream.of(
			// includes all resources from all partitions - partition 1, partition 2 and default partition
			Arguments.of(List.of(), List.of(), false, 6),
			// includes all Observations
			Arguments.of(List.of("Observation?"), twoPartitions, false, 3),
			// includes all Observations
			Arguments.of(List.of("Observation?"), allPartitions, false, 3),
			Arguments.of(List.of("Observation?"), List.of(), false, 0),
			// includes Observations in partition 1
			Arguments.of(List.of("Observation?"), partition1, true, 2),
			// includes all Patients from all partitions - partition 1, partition 2 and default partition
			Arguments.of(List.of("Patient?"), allPartitions, false, 3),
			// includes Patients and Observations in partitions 1 and 2
			Arguments.of(List.of("Observation?", "Patient?"), twoPartitions, false, 5),
			// includes Observations from partition 1 and Patients from partition 2
			Arguments.of(List.of("Observation?", "Patient?"), twoPartitions, true, 3),
			// includes final Observations and Patients from partitions 1 and 2
			Arguments.of(List.of("Observation?status=final", "Patient?"), twoPartitions, false, 4),
			// includes final Observations from partition 1 and Patients from partition 2
			Arguments.of(List.of("Observation?status=final", "Patient?"), twoPartitions, true, 2),
			// includes final Observations and Patients from partition 1
			Arguments.of(List.of("Observation?status=final", "Patient?"), partition1, false, 2)
		);
	}

	@ParameterizedTest
	@MethodSource(value = "getReindexParameters")
	public void testReindex_byMultipleUrlsAndPartitions_indexesMatchingResources(List<String> theUrls,
			List<RequestPartitionId> thePartitions,
			boolean theShouldAssignPartitionToUrl,
			int theExpectedIndexedResourceCount) {

		JobParameters parameters = JobParameters.from(theUrls, thePartitions, theShouldAssignPartitionToUrl);

		// execute
		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
		startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
		startRequest.setParameters(parameters);
		Batch2JobStartResponse res = myJobCoordinator.startInstance(new SystemRequestDetails(), startRequest);
		JobInstance jobInstance = myBatch2JobHelper.awaitJobCompletion(res);

		// verify that only resources matching the URLs and partitions provided via parameters were re-indexed
		assertThat(jobInstance.getCombinedRecordsProcessed()).isEqualTo(theExpectedIndexedResourceCount);
	}
}
@@ -200,7 +200,7 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest {
			myPatientDao.search(map);
			fail();
		} catch (InternalErrorException e) {
			assertEquals(Msg.code(1319) + "No interceptor provided a value for pointcut: STORAGE_PARTITION_IDENTIFY_READ", e.getMessage());
			assertEquals(Msg.code(1319) + "No interceptor provided a value for pointcuts: [STORAGE_PARTITION_IDENTIFY_ANY, STORAGE_PARTITION_IDENTIFY_READ]", e.getMessage());
		}
	}
@@ -1,6 +1,5 @@
package ca.uhn.fhir.jpa.partition;

import static org.junit.jupiter.api.Assertions.assertEquals;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.data.IPartitionDao;
import ca.uhn.fhir.jpa.entity.PartitionEntity;

@@ -8,9 +7,11 @@ import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;

@@ -18,6 +19,8 @@ import org.springframework.beans.factory.annotation.Autowired;

import java.util.Set;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

@@ -52,6 +55,25 @@ class RequestPartitionHelperSvcTest extends BaseJpaR4Test {
		myPatient.setId(new IdType("Patient", "123", "1"));
	}

	// TODO EHP: re-enable this test
	@Disabled
	@Test
	public void testDetermineReadPartitionForSystemRequest_whenResourceIsNonPartitionable_returnsDefaultPartition() {
		// setup
		SystemRequestDetails srd = new SystemRequestDetails();
		srd.setRequestPartitionId(RequestPartitionId.allPartitions());

		// execute
		ConceptMap conceptMap = new ConceptMap();
		RequestPartitionId result = mySvc.determineCreatePartitionForRequest(srd, conceptMap, conceptMap.fhirType());

		// verify
		assertThat(result.isAllPartitions()).isFalse();
		assertThat(result.hasPartitionNames()).isFalse();
		assertThat(result.isDefaultPartition()).isTrue();
		assertThat(result.hasDefaultPartitionId()).isTrue();
	}

	@Test
	public void testDetermineReadPartitionForSystemRequest_withPartitionIdOnly_returnsCorrectPartition() {
		// setup
@@ -42,6 +42,7 @@ import org.hl7.fhir.r4.model.UnsignedIntType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;

import java.io.IOException;

@@ -1240,6 +1241,234 @@ public class ResourceProviderR4EverythingTest extends BaseResourceProviderR4Test
		assertThat(actualResourceIds).containsExactlyInAnyOrder(desiredPid, desiredPractitionerId, groupId);
	}

	@Nested
	public class ResourceProviderR4EverythingWithOffsetTest {

		// Patient 1 resources
		private IIdType o1Id;
		private IIdType e1Id;
		private IIdType p1Id;
		private IIdType c1Id;

		// Patient 2 resources
		private IIdType o2Id;
		private IIdType p2Id;
		private IIdType c2Id;

		// Patient 3 resources
		private IIdType o3Id;
		private IIdType p3Id;
		private IIdType c3Id;

		// Patient 4 resources
		private IIdType o4Id;
		private IIdType p4Id;
		private IIdType c4Id;

		// Resource id not linked to any Patient
		private IIdType c5Id;

		@Test
		public void testPagingOverEverything_onPatientInstance_returnsCorrectBundles() {
			String methodName = "testEverythingPatientInstanceOffset";
			createPatientResources(methodName);

			// There are 4 results, so let's make 4 pages of 1.
			Parameters parameters = new Parameters();
			addOffsetAndCount(parameters, 0, 1);

			// first page
			Parameters output = myClient.operation().onInstance(p1Id).named("everything").withParameters(parameters).execute();
			Bundle bundle = (Bundle) output.getParameter().get(0).getResource();
			List<IIdType> results = new ArrayList<>(validateAndGetIdListFromBundle(bundle, 1));

			// second page
			Bundle nextBundle = getNextBundle(bundle);
			results.addAll(validateAndGetIdListFromBundle(nextBundle, 1));

			// third page
			nextBundle = getNextBundle(nextBundle);
			results.addAll(validateAndGetIdListFromBundle(nextBundle, 1));

			// fourth page
			nextBundle = getNextBundle(nextBundle);
			results.addAll(validateAndGetIdListFromBundle(nextBundle, 1));

			// no next link
			assertNull(nextBundle.getLink("next"));
			assertThat(results).containsOnly(p1Id, c1Id, o1Id, e1Id);
		}

		@Test
		public void testPagingOverEverything_onPatientType_returnsCorrectBundles() {
			String methodName = "testEverythingPatientTypeOffset";
			createPatientResources(methodName);

			// Test paging works.
			// There are 13 results, so let's make 3 pages of 4 and 1 page of 1.
			Parameters parameters = new Parameters();
			addOffsetAndCount(parameters, 0, 4);

			Parameters output = myClient.operation().onType(Patient.class).named("everything").withParameters(parameters).execute();
			validateEverythingBundle(output);
		}

		@Test
		public void testPagingOverEverything_onPatientTypeWithAndIdParameter_returnsCorrectBundles() {
			String methodName = "testEverythingPatientTypeWithAndIdParameter";
			createPatientResources(methodName);

			// Test 4 patients using an AND list,
			// e.g. _id=1&_id=2&_id=3&_id=4
			Parameters parameters = new Parameters();
			parameters.addParameter("_id", p1Id.getIdPart());
			parameters.addParameter("_id", p2Id.getIdPart());
			parameters.addParameter("_id", p3Id.getIdPart());
			parameters.addParameter("_id", p4Id.getIdPart());
			// Test for Patient 1-4 with _count=4 and _offset=0 with paging
			addOffsetAndCount(parameters, 0, 4);

			Parameters output = myClient.operation().onType(Patient.class).named("everything").withParameters(parameters).execute();
			validateEverythingBundle(output);
		}

		@Test
		public void testPagingOverEverything_onPatientTypeWithOrIdParameter_returnsCorrectBundles() {
			String methodName = "testEverythingPatientTypeWithOrIdParameter";
			createPatientResources(methodName);

			// Test 4 patients using an OR list,
			// e.g. _id=1,2,3,4
			Parameters parameters = new Parameters();
			parameters.addParameter("_id", p1Id.getIdPart() + "," + p2Id.getIdPart() + "," + p3Id.getIdPart() + "," + p4Id.getIdPart());
			// Test for Patient 1-4 with _count=4 and _offset=0 with paging
			addOffsetAndCount(parameters, 0, 4);

			Parameters output = myClient.operation().onType(Patient.class).named("everything").withParameters(parameters).execute();
			validateEverythingBundle(output);
		}

		@Test
		public void testPagingOverEverything_onPatientTypeWithOrAndIdParameter_returnsCorrectBundles() {
			String methodName = "testEverythingPatientTypeWithOrAndIdParameter";
			createPatientResources(methodName);

			// Test combining 2 or-listed params,
			// e.g. _id=1,2&_id=3,4
			Parameters parameters = new Parameters();
			parameters.addParameter("_id", p1Id.getIdPart() + "," + p2Id.getIdPart());
			parameters.addParameter("_id", p3Id.getIdPart() + "," + p4Id.getIdPart());
			// Test for Patient 1-4 with _count=4 and _offset=0 with paging
			addOffsetAndCount(parameters, 0, 4);

			Parameters output = myClient.operation().onType(Patient.class).named("everything").withParameters(parameters).execute();
			validateEverythingBundle(output);
		}

		@Test
		public void testPagingOverEverything_onPatientTypeWithNotLinkedPatients_returnsCorrectBundles() {
			String methodName = "testEverythingPatientTypeWithNotLinkedPatients";

			// setup
			p1Id = createPatient(methodName, "1");
			p2Id = createPatient(methodName, "2");
			p3Id = createPatient(methodName, "3");
			p4Id = createPatient(methodName, "4");

			// Test patients without links
			Parameters parameters = new Parameters();
			addOffsetAndCount(parameters, 0, 1);

			// first page
			Parameters output = myClient.operation().onType(Patient.class).named("everything").withParameters(parameters).execute();
			Bundle bundle = (Bundle) output.getParameter().get(0).getResource();
			List<IIdType> results = new ArrayList<>(validateAndGetIdListFromBundle(bundle, 1));

			// second page
			Bundle nextBundle = getNextBundle(bundle);
			results.addAll(validateAndGetIdListFromBundle(nextBundle, 1));

			// third page
			nextBundle = getNextBundle(nextBundle);
			results.addAll(validateAndGetIdListFromBundle(nextBundle, 1));

			// fourth page
			nextBundle = getNextBundle(nextBundle);
			results.addAll(validateAndGetIdListFromBundle(nextBundle, 1));

			// no next link
			assertNull(nextBundle.getLink("next"));
			assertThat(results).containsOnly(p1Id, p2Id, p3Id, p4Id);
		}

		private void addOffsetAndCount(Parameters theParameters, int theOffset, int theCount) {
			theParameters.addParameter(new Parameters.ParametersParameterComponent()
				.setName("_count").setValue(new UnsignedIntType(theCount)));
			theParameters.addParameter(new Parameters.ParametersParameterComponent()
				.setName("_offset").setValue(new UnsignedIntType(theOffset)));
		}

		private void validateEverythingBundle(Parameters theParameters) {
			// first page
			Bundle bundle = (Bundle) theParameters.getParameter().get(0).getResource();
			List<IIdType> results = new ArrayList<>(validateAndGetIdListFromBundle(bundle, 4));

			// second page
			Bundle nextBundle = getNextBundle(bundle);
			results.addAll(validateAndGetIdListFromBundle(nextBundle, 4));

			// third page
			nextBundle = getNextBundle(nextBundle);
			results.addAll(validateAndGetIdListFromBundle(nextBundle, 4));

			// fourth page
			nextBundle = getNextBundle(nextBundle);
			results.addAll(validateAndGetIdListFromBundle(nextBundle, 1));

			// no next link
			assertNull(nextBundle.getLink("next"));
			// make sure all resources are returned, order doesn't matter
			assertThat(results).containsOnly(p1Id, p2Id, p3Id, p4Id, c1Id, c2Id, c3Id, c4Id, o1Id, e1Id, o2Id, o3Id, o4Id);
		}

		private Bundle getNextBundle(Bundle theBundle) {
			String next = theBundle.getLink("next").getUrl();
			return myClient.loadPage().byUrl(next).andReturnBundle(Bundle.class).execute();
		}

		private List<IIdType> validateAndGetIdListFromBundle(Bundle theBundle, int theSize) {
			assertEquals(Bundle.BundleType.SEARCHSET, theBundle.getType());
			assertThat(theBundle.getEntry()).hasSize(theSize);
			return toUnqualifiedVersionlessIds(theBundle);
		}

		private void createPatientResources(String theMethodName) {
			// Patient 1 resources
			o1Id = createOrganization(theMethodName, "1");
			e1Id = createEncounter(theMethodName, "1");
			p1Id = createPatientWithIndexAtOrganization(theMethodName, "1", o1Id);
			c1Id = createConditionWithEncounterForPatient(theMethodName, "1", p1Id, e1Id);

			// Patient 2 resources
			o2Id = createOrganization(theMethodName, "2");
			p2Id = createPatientWithIndexAtOrganization(theMethodName, "2", o2Id);
			c2Id = createConditionForPatient(theMethodName, "2", p2Id);

			// Patient 3 resources
			o3Id = createOrganization(theMethodName, "3");
			p3Id = createPatientWithIndexAtOrganization(theMethodName, "3", o3Id);
			c3Id = createConditionForPatient(theMethodName, "3", p3Id);

			// Patient 4 resources
			o4Id = createOrganization(theMethodName, "4");
			p4Id = createPatientWithIndexAtOrganization(theMethodName, "4", o4Id);
			c4Id = createConditionForPatient(theMethodName, "4", p4Id);

			// Resource not linked to any Patient
			c5Id = createConditionForPatient(theMethodName, "5", null);
		}
	}

	private Bundle executeEverythingOperationOnInstance(IIdType theInstanceIdType) {
		return myClient
			.operation()
@@ -1250,12 +1479,22 @@ public class ResourceProviderR4EverythingTest extends BaseResourceProviderR4Test
.execute();
}

private IIdType createOrganization(String methodName, String s) {
private IIdType createOrganization(String methodName, String theIndex) {
Organization o1 = new Organization();
o1.setName(methodName + s);
o1.setName(methodName + theIndex);
return myClient.create().resource(o1).execute().getId().toUnqualifiedVersionless();
}

private IIdType createEncounter(String methodName, String theIndex) {
Encounter e1 = new Encounter();
e1.setLanguage(methodName + theIndex);
return myClient.create().resource(e1).execute().getId().toUnqualifiedVersionless();
}

public IIdType createPatient(String theMethodName, String theIndex) {
return createPatientWithIndexAtOrganization(theMethodName, theIndex, null);
}

public IIdType createPatientWithIndexAtOrganization(String theMethodName, String theIndex, IIdType theOrganizationId) {
Patient p1 = new Patient();
p1.addName().setFamily(theMethodName + theIndex);

@@ -1265,13 +1504,22 @@ public class ResourceProviderR4EverythingTest extends BaseResourceProviderR4Test
}

public IIdType createConditionForPatient(String theMethodName, String theIndex, IIdType thePatientId) {
return createConditionWithEncounterForPatient(theMethodName, theIndex, thePatientId, null);
}

public IIdType createConditionWithEncounterForPatient(String theMethodName,
String theIndex,
IIdType thePatientId,
IIdType theEncounterId) {
Condition c = new Condition();
c.addIdentifier().setValue(theMethodName + theIndex);
if (thePatientId != null) {
c.getSubject().setReferenceElement(thePatientId);
}
IIdType cId = myClient.create().resource(c).execute().getId().toUnqualifiedVersionless();
return cId;
if (theEncounterId != null) {
c.setEncounter(new Reference(theEncounterId));
}
return myClient.create().resource(c).execute().getId().toUnqualifiedVersionless();
}

private IIdType createMedicationRequestForPatient(IIdType thePatientId, String theIndex) {
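The tests above page through the $everything operation by passing _count and _offset as operation parameters and then following each returned bundle's "next" link. For orientation, here is a minimal standalone sketch of that client-side pattern; the base URL is illustrative and nothing here is taken from the test suite itself:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.UnsignedIntType;

public class EverythingPagingSketch {
	public static void main(String[] args) {
		// Illustrative server address; any running R4 endpoint would do
		IGenericClient client = FhirContext.forR4Cached()
			.newRestfulGenericClient("http://localhost:8080/fhir");

		Parameters inParams = new Parameters();
		inParams.addParameter(new Parameters.ParametersParameterComponent()
			.setName("_count").setValue(new UnsignedIntType(4)));
		inParams.addParameter(new Parameters.ParametersParameterComponent()
			.setName("_offset").setValue(new UnsignedIntType(0)));

		// Invoke $everything on the Patient type, as the tests above do
		Parameters outParams = client.operation()
			.onType(Patient.class)
			.named("everything")
			.withParameters(inParams)
			.execute();

		// The operation returns the first page; keep following "next" links
		Bundle page = (Bundle) outParams.getParameter().get(0).getResource();
		while (page != null) {
			page.getEntry().forEach(entry ->
				System.out.println(entry.getResource().getIdElement().toUnqualifiedVersionless()));
			page = page.getLink(Bundle.LINK_NEXT) != null ? client.loadPage().next(page) : null;
		}
	}
}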
@@ -6,6 +6,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;

@@ -725,7 +726,7 @@ public class InMemorySubscriptionMatcherR4Test {
@Test
public void testSearchStringParamReallyLong() {
String methodName = "testSearchStringParamReallyLong";
String value = StringUtils.rightPad(methodName, 200, 'a');
String value = StringUtils.rightPad(methodName, ResourceIndexedSearchParamString.MAX_LENGTH, 'a');

Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("001");
@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.8-SNAPSHOT</version>
<version>7.3.10-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.8-SNAPSHOT</version>
<version>7.3.10-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.8-SNAPSHOT</version>
<version>7.3.10-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -81,14 +81,6 @@
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
</dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
</dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derbytools</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-test</artifactId>
@@ -12,9 +12,8 @@ import ca.uhn.fhir.rest.server.interceptor.auth.IAuthorizationSearchParamMatcher
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import ca.uhn.test.util.LogEventIterableAssert;
import ca.uhn.test.util.LogbackTestExtension;
import ch.qos.logback.classic.Level;
import ca.uhn.test.util.LogbackTestExtensionAssert;
import jakarta.annotation.Nullable;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

@@ -272,7 +271,7 @@ class FhirQueryRuleImplTest implements ITestDataBuilder {

// then
assertNull(verdict);
LogEventIterableAssert.assertThat(myLogCapture.getLogEvents()).hasEventWithLevelAndMessageContains(Level.WARN, "unsupported chain XXX");
LogbackTestExtensionAssert.assertThat(myLogCapture).hasWarnMessage("unsupported chain XXX");
}

@Test

@@ -287,7 +286,7 @@ class FhirQueryRuleImplTest implements ITestDataBuilder {

// then
assertEquals(PolicyEnum.DENY, verdict.getDecision());
LogEventIterableAssert.assertThat(myLogCapture.getLogEvents()).hasEventWithLevelAndMessageContains(Level.WARN, "unsupported chain XXX");
LogbackTestExtensionAssert.assertThat(myLogCapture).hasWarnMessage("unsupported chain XXX");
}

/**

@@ -306,7 +305,7 @@ class FhirQueryRuleImplTest implements ITestDataBuilder {

// then
assertNull(verdict);
LogEventIterableAssert.assertThat(myLogCapture.getLogEvents()).hasEventWithLevelAndMessageContains(Level.WARN, "No matcher provided");
LogbackTestExtensionAssert.assertThat(myLogCapture).hasWarnMessage("No matcher provided");
}

}
@@ -36,6 +36,7 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;

import static ca.uhn.fhir.jpa.embedded.HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION;
import static ca.uhn.fhir.jpa.migrate.SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME;

@@ -58,10 +59,14 @@ public class HapiSchemaMigrationTest {
private static final String METADATA_IS_NULLABLE_YES = "YES";

private static final String TABLE_HFJ_RES_SEARCH_URL = "HFJ_RES_SEARCH_URL";
private static final String TABLE_TRM_CONCEPT_DESIG = "TRM_CONCEPT_DESIG";
private static final String COLUMN_RES_SEARCH_URL = "RES_SEARCH_URL";
private static final String COLUMN_PARTITION_ID = "PARTITION_ID";
private static final String COLUMN_PARTITION_DATE = "PARTITION_DATE";

private static final String COLUMN_VAL = "VAL";
private static final String COLUMN_VAL_VC = "VAL_VC";

private static final String NULL_PLACEHOLDER = "[NULL]";

static {

@@ -120,6 +125,8 @@ public class HapiSchemaMigrationTest {
verifyForcedIdMigration(dataSource);

verifyHfjResSearchUrlMigration(database, theDriverType);

verifyTrm_Concept_Desig(database, theDriverType);
}

/**

@@ -176,8 +183,6 @@ public class HapiSchemaMigrationTest {
}
}

ourLog.info("6145: actualColumnResults: {}", actualColumnResults);

final List<Map<String,String>> actualPrimaryKeyResults = new ArrayList<>();

try (final ResultSet primaryKeyResultSet = tableMetaData.getPrimaryKeys(null, null, TABLE_HFJ_RES_SEARCH_URL)) {

@@ -207,6 +212,61 @@ public class HapiSchemaMigrationTest {
}
}

private void verifyTrm_Concept_Desig(JpaEmbeddedDatabase theDatabase, DriverTypeEnum theDriverType) throws SQLException {
final List<Map<String, Object>> allCount = theDatabase.query(String.format("SELECT count(*) FROM %s", TABLE_TRM_CONCEPT_DESIG));
final List<Map<String, Object>> nonNullValCount = theDatabase.query(String.format("SELECT count(*) FROM %s WHERE %s IS NOT NULL", TABLE_TRM_CONCEPT_DESIG, COLUMN_VAL));
final List<Map<String, Object>> nullValVcCount = theDatabase.query(String.format("SELECT count(*) FROM %s WHERE %s IS NULL", TABLE_TRM_CONCEPT_DESIG, COLUMN_VAL_VC));

assertThat(nonNullValCount).hasSize(1);
final Collection<Object> queryResultValuesVal = nonNullValCount.get(0).values();
assertThat(queryResultValuesVal).hasSize(1);
final Object queryResultValueVal = queryResultValuesVal.iterator().next();
assertThat(queryResultValueVal).isInstanceOf(Number.class);
if (queryResultValueVal instanceof Number queryResultNumber) {
assertThat(queryResultNumber.intValue()).isEqualTo(1);
}

assertThat(nullValVcCount).hasSize(1);
final Collection<Object> queryResultValuesValVc = nullValVcCount.get(0).values();
assertThat(queryResultValuesValVc).hasSize(1);
final Object queryResultValueValVc = queryResultValuesValVc.iterator().next();
assertThat(queryResultValueValVc).isInstanceOf(Number.class);
if (queryResultValueValVc instanceof Number queryResultNumber) {
assertThat(queryResultNumber.intValue()).isEqualTo(1);
}

final Object allCountValue = allCount.get(0).values().iterator().next();
if (allCountValue instanceof Number allCountNumber) {
assertThat(allCountNumber.intValue()).isEqualTo(1);
}

try (final Connection connection = theDatabase.getDataSource().getConnection()) {
final DatabaseMetaData tableMetaData = connection.getMetaData();

final List<Map<String, String>> actualColumnResults = new ArrayList<>();
try (final ResultSet columnsResultSet = tableMetaData.getColumns(null, null, getTableNameWithDbSpecificCase(theDriverType, TABLE_TRM_CONCEPT_DESIG), null)) {
while (columnsResultSet.next()) {
final Map<String, String> columnMap = new HashMap<>();
actualColumnResults.add(columnMap);

extractAndAddToMap(columnsResultSet, columnMap, METADATA_COLUMN_NAME);
extractAndAddToMap(columnsResultSet, columnMap, METADATA_DATA_TYPE);
extractAndAddToMap(columnsResultSet, columnMap, METADATA_IS_NULLABLE);
extractAndAddToMap(columnsResultSet, columnMap, METADATA_DEFAULT_VALUE);
}

assertThat(actualColumnResults).contains(addExpectedColumnMetadata(COLUMN_VAL, Integer.toString(Types.VARCHAR), METADATA_IS_NULLABLE_YES, null));
assertThat(actualColumnResults).contains(addExpectedColumnMetadata(COLUMN_VAL_VC, getExpectedSqlTypeForValVc(theDriverType), METADATA_IS_NULLABLE_YES, null));
}
}
}

private String getTableNameWithDbSpecificCase(DriverTypeEnum theDriverType, String theTableName) {
return Set.of(DriverTypeEnum.ORACLE_12C, DriverTypeEnum.H2_EMBEDDED).contains(theDriverType)
? theTableName
: theTableName.toLowerCase();
}

@Nonnull
private Map<String, String> addExpectedColumnMetadata(String theColumnName, String theDataType, String theNullable, @Nullable String theDefaultValue) {
return Map.of(METADATA_COLUMN_NAME, theColumnName,

@@ -234,6 +294,12 @@ public class HapiSchemaMigrationTest {
: Integer.toString(Types.DATE);
}

private String getExpectedSqlTypeForValVc(DriverTypeEnum theDriverType) {
return Set.of(DriverTypeEnum.ORACLE_12C, DriverTypeEnum.H2_EMBEDDED).contains(theDriverType)
? Integer.toString(Types.CLOB)
: Integer.toString(Types.VARCHAR);
}

private void extractAndAddToMap(ResultSet theResultSet, Map<String,String> theMap, String theColumn) throws SQLException {
theMap.put(theColumn, Optional.ofNullable(theResultSet.getString(theColumn))
.map(defaultValueNonNull -> defaultValueNonNull.equals("((-1))") ? "-1" : defaultValueNonNull) // MSSQL returns "((-1))" for default value
@@ -56,27 +56,27 @@ public abstract class BaseTest {
}
});

// Derby
retVal.add(new Supplier<TestDatabaseDetails>() {
@Override
public TestDatabaseDetails get() {
String url = "jdbc:derby:memory:" + DATABASE_NAME + UUID.randomUUID() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "SA", "SA");
BasicDataSource dataSource = new BasicDataSource();
dataSource.setUrl(url);
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}

@Override
public String toString() {
return "Derby";
}
});

// Derby
// retVal.add(new Supplier<TestDatabaseDetails>() {
// @Override
// public TestDatabaseDetails get() {
// String url = "jdbc:derby:memory:" + DATABASE_NAME + UUID.randomUUID() + ";create=true";
// DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "SA", "SA");
// BasicDataSource dataSource = new BasicDataSource();
// dataSource.setUrl(url);
// dataSource.setUsername("SA");
// dataSource.setPassword("SA");
// dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
// HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
// return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
// }
//
// @Override
// public String toString() {
// return "Derby";
// }
// });
//
return retVal.stream();
}
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.3.8-SNAPSHOT</version>
<version>7.3.10-SNAPSHOT</version>

<relativePath>../pom.xml</relativePath>
</parent>
@@ -1,20 +1,20 @@
<!DOCTYPE html>
<html lang="en">
<head th:include="tmpl-head :: head">
<head th:insert="~{tmpl-head :: head}">
<title>About This Server</title>
</head>

<body>
<form action="" method="get" id="outerForm">
<div th:replace="tmpl-navbar-top :: top" ></div>
<div th:replace="~{tmpl-navbar-top :: top}" ></div>

<div class="container-fluid">
<div class="row">
<div th:replace="tmpl-navbar-left :: left" ></div>
<div th:replace="~{tmpl-navbar-left :: left}" ></div>

<div class="col-sm-9 col-sm-offset-3 col-md-9 col-md-offset-3 main">

<div th:replace="tmpl-banner :: banner"></div>
<div th:replace="~{tmpl-banner :: banner}"></div>

<div class="panel panel-default">
<div class="panel-heading">
@@ -1,7 +1,8 @@
<!DOCTYPE html>
<html lang="en">
<head th:include="tmpl-head :: head">
<title>RESTful Tester</title>
<head>
<th:block th:insert="~{tmpl-head :: head}" />
<title>RESTful Tester</title>
</head>

<body>

@@ -9,16 +10,16 @@
<input type="hidden" id="serverId" name="serverId"
th:value="${serverId}" />

<div th:replace="tmpl-navbar-top :: top"></div>
<div th:replace="~{tmpl-navbar-top :: top}"></div>

<div class="container-fluid">
<div class="row">

<div th:replace="tmpl-navbar-left :: left"></div>
<div th:replace="~{tmpl-navbar-left :: left}"></div>

<div class="col-sm-9 col-sm-offset-3 col-md-9 col-md-offset-3 main">

<div th:replace="tmpl-banner :: banner"></div>
<div th:replace="~{tmpl-banner :: banner}"></div>

<!-- ********************************************************** -->
<!-- ** Subscriptions Playground ** -->
@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.3.8-SNAPSHOT</version>
<version>7.3.10-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -19,6 +19,8 @@
*/
package ca.uhn.hapi.fhir.cdshooks.api;

import ca.uhn.hapi.fhir.cdshooks.api.json.CdsHooksExtension;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

@@ -72,4 +74,6 @@ public @interface CdsService {
* An arbitrary string which will be used to store stringified JSON
*/
String extension() default "";

Class<? extends CdsHooksExtension> extensionClass() default CdsHooksExtension.class;
}
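For illustration, a service wired up with the new extensionClass attribute might look like the following sketch. The service id, hook, title, and extension JSON are hypothetical (the other annotation attributes are assumed to keep their defaults), and the nested extension type is just a plain CdsHooksExtension subclass:

import ca.uhn.hapi.fhir.cdshooks.api.CdsService;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsHooksExtension;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceRequestJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseJson;
import com.fasterxml.jackson.annotation.JsonProperty;

public class ExampleCdsServiceProvider {

	// Hypothetical typed extension for this service's discovery entry
	public static class ExampleExtension extends CdsHooksExtension {
		@JsonProperty("example-config-item")
		String myConfigItem;
	}

	@CdsService(
		value = "example-service",
		hook = "patient-view",
		title = "Example service",
		description = "Example CDS service with a typed extension",
		extension = "{\"example-config-item\": \"on\"}",
		extensionClass = ExampleExtension.class)
	public CdsServiceResponseJson exampleService(CdsServiceRequestJson theRequest) {
		// A real service would build cards here
		return new CdsServiceResponseJson();
	}
}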
@@ -54,7 +54,7 @@ public interface ICdsServiceRegistry {
* @param theCdsServiceFeedbackJson the request
* @return the response
*/
String callFeedback(String theServiceId, CdsServiceFeedbackJson theCdsServiceFeedbackJson);
CdsServiceFeedbackJson callFeedback(String theServiceId, CdsServiceFeedbackJson theCdsServiceFeedbackJson);

/**
* Register a new CDS Service with the endpoint.

@@ -86,4 +86,12 @@ public interface ICdsServiceRegistry {
* @param theServiceId the id of the service to be removed
*/
void unregisterService(String theServiceId, String theModuleId);

/**
* Get the registered CDS service with the given service ID
* @param theServiceId the id of the service to be retrieved
* @return CdsServiceJson
* @throws IllegalArgumentException if a CDS service with the provided serviceId is not found
*/
CdsServiceJson getCdsServiceJson(String theServiceId);
}
@@ -24,18 +24,29 @@ import com.fasterxml.jackson.annotation.JsonProperty;

/**
* @see <a href="https://cds-hooks.hl7.org/1.0/#extensions">For reading more about Extension support in CDS hooks</a>
* An example can be found <a href="https://build.fhir.org/ig/HL7/davinci-crd/deviations.html#configuration-options-extension">here</a>
*/
public abstract class BaseCdsServiceJson implements IModelJson {

@JsonProperty(value = "extension", required = true)
String myExtension;
@JsonProperty(value = "extension")
CdsHooksExtension myExtension;

public String getExtension() {
private Class<? extends CdsHooksExtension> myExtensionClass;

public CdsHooksExtension getExtension() {
return myExtension;
}

public BaseCdsServiceJson setExtension(String theExtension) {
public BaseCdsServiceJson setExtension(CdsHooksExtension theExtension) {
this.myExtension = theExtension;
return this;
}

public void setExtensionClass(Class<? extends CdsHooksExtension> theClass) {
this.myExtensionClass = theClass;
}

public Class<? extends CdsHooksExtension> getExtensionClass() {
return myExtensionClass;
}
}
@@ -1,6 +1,6 @@
/*-
* #%L
* HAPI FHIR Test Utilities
* HAPI FHIR - CDS Hooks
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%

@@ -17,14 +17,12 @@
* limitations under the License.
* #L%
*/
package ca.uhn.test.util;
package ca.uhn.hapi.fhir.cdshooks.api.json;

import ch.qos.logback.classic.spi.ILoggingEvent;
import org.assertj.core.api.AbstractAssert;
import ca.uhn.fhir.model.api.IModelJson;

public class LogEventAssert extends AbstractAssert<LogEventAssert, ILoggingEvent> {

protected LogEventAssert(ILoggingEvent actual) {
super(actual, LogEventAssert.class);
}
}
/**
* Users can define CDS Hooks extensions by extending this class.
* Implementors can extend this class for defining their custom extensions.
*/
public class CdsHooksExtension implements IModelJson {}
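As a concrete illustration of the javadoc above, a custom extension is just a Jackson-annotated subclass. This sketch mirrors the RequestExtension / "example-config-item" shape exercised by the controller tests later in this diff, but the class itself is illustrative rather than copied from the codebase:

import ca.uhn.hapi.fhir.cdshooks.api.json.CdsHooksExtension;
import com.fasterxml.jackson.annotation.JsonProperty;

public class RequestExtension extends CdsHooksExtension {

	// Bound from the "extension" object of an incoming CDS Hooks request
	@JsonProperty("example-config-item")
	private String myConfigItem;

	public String getConfigItem() {
		return myConfigItem;
	}

	public void setConfigItem(String theConfigItem) {
		myConfigItem = theConfigItem;
	}
}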
@@ -98,13 +98,15 @@ public class CdsHooksConfig {
CdsPrefetchSvc theCdsPrefetchSvc,
@Qualifier(CDS_HOOKS_OBJECT_MAPPER_FACTORY) ObjectMapper theObjectMapper,
ICdsCrServiceFactory theCdsCrServiceFactory,
ICrDiscoveryServiceFactory theCrDiscoveryServiceFactory) {
ICrDiscoveryServiceFactory theCrDiscoveryServiceFactory,
FhirContext theFhirContext) {
return new CdsServiceRegistryImpl(
theCdsHooksContextBooter,
theCdsPrefetchSvc,
theObjectMapper,
theCdsCrServiceFactory,
theCrDiscoveryServiceFactory);
theCrDiscoveryServiceFactory,
theFhirContext);
}

@Bean
@@ -94,13 +94,12 @@ public class CdsHooksController {
path = "{cds_hook}/feedback",
method = {RequestMethod.POST},
consumes = {MediaType.APPLICATION_JSON_VALUE})
public ResponseEntity<String> cdsServiceFeedback(
public ResponseEntity<CdsServiceFeedbackJson> cdsServiceFeedback(
@PathVariable("cds_hook") String theCdsHook,
@RequestBody CdsServiceFeedbackJson theCdsServiceFeedbackJson) {
String json = myCdsServiceRegistry.callFeedback(theCdsHook, theCdsServiceFeedbackJson);

CdsServiceFeedbackJson response = myCdsServiceRegistry.callFeedback(theCdsHook, theCdsServiceFeedbackJson);
return ResponseEntity.status(200)
.contentType(MediaType.APPLICATION_JSON)
.body(json);
.body(response);
}
}
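With this change the feedback endpoint returns a structured CdsServiceFeedbackJson rather than a raw string. A hedged client-side sketch of the new contract follows; the base URL, service id, and card uuid are illustrative, the enum import path is assumed to match the json model package, and RestTemplate is just one convenient way to POST:

import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceFeebackOutcomeEnum;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceFeedbackJson;
import org.springframework.web.client.RestTemplate;

public class FeedbackClientSketch {
	public static void main(String[] args) {
		CdsServiceFeedbackJson feedback = new CdsServiceFeedbackJson();
		feedback.setCard("9368d37b-283f-44a0-93ea-547cebab93b4"); // card uuid from a prior response
		feedback.setOutcome(CdsServiceFeebackOutcomeEnum.accepted);

		// The server now answers with the same JSON model instead of a plain string
		CdsServiceFeedbackJson echoed = new RestTemplate().postForObject(
			"http://localhost:8080/cds-services/example-service/feedback",
			feedback,
			CdsServiceFeedbackJson.class);
		System.out.println("feedback acknowledged: " + (echoed != null));
	}
}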
@@ -0,0 +1,117 @@
/*-
* #%L
* HAPI FHIR - CDS Hooks
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.hapi.fhir.cdshooks.serializer;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.serializer.FhirResourceDeserializer;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsHooksExtension;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceRequestContextJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceRequestJson;
import ca.uhn.hapi.fhir.cdshooks.svc.CdsServiceRegistryImpl;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.fasterxml.jackson.databind.module.SimpleModule;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

public class CdsServiceRequestJsonDeserializer extends StdDeserializer<CdsServiceRequestJson> {

private final CdsServiceRegistryImpl myCdsServiceRegistry;
private final ObjectMapper myObjectMapper;
private final FhirContext myFhirContext;
private final IParser myParser;

public CdsServiceRequestJsonDeserializer(CdsServiceRegistryImpl theCdsServiceRegistry, FhirContext theFhirContext) {
super(CdsServiceRequestJson.class);
myCdsServiceRegistry = theCdsServiceRegistry;
myFhirContext = theFhirContext;
myParser = myFhirContext.newJsonParser().setPrettyPrint(true);
// We create a new ObjectMapper instead of using the one from the ApplicationContext to avoid an infinite loop
// during deserialization.
myObjectMapper = new ObjectMapper();
configureObjectMapper(myObjectMapper);
}

@Override
public CdsServiceRequestJson deserialize(JsonParser theJsonParser, DeserializationContext theDeserializationContext)
throws IOException {
final JsonNode cdsServiceRequestJsonNode = theJsonParser.getCodec().readTree(theJsonParser);
final JsonNode hookNode = cdsServiceRequestJsonNode.get("hook");
final JsonNode extensionNode = cdsServiceRequestJsonNode.get("extension");
final JsonNode requestContext = cdsServiceRequestJsonNode.get("context");
final CdsServiceRequestJson cdsServiceRequestJson =
myObjectMapper.treeToValue(cdsServiceRequestJsonNode, CdsServiceRequestJson.class);
if (extensionNode != null) {
CdsHooksExtension myRequestExtension = deserializeExtension(hookNode.textValue(), extensionNode.toString());
cdsServiceRequestJson.setExtension(myRequestExtension);
}
if (requestContext != null) {
LinkedHashMap<String, Object> map =
myObjectMapper.readValue(requestContext.toString(), LinkedHashMap.class);
cdsServiceRequestJson.setContext(deserializeRequestContext(map));
}
return cdsServiceRequestJson;
}

void configureObjectMapper(ObjectMapper theObjectMapper) {
SimpleModule module = new SimpleModule();
module.addDeserializer(IBaseResource.class, new FhirResourceDeserializer(myFhirContext));
theObjectMapper.registerModule(module);
// Ignore properties which are not defined by the specific extension implementation.
theObjectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
}

CdsHooksExtension deserializeExtension(String theServiceId, String theExtension) throws JsonProcessingException {
final CdsServiceJson cdsServicesJson = myCdsServiceRegistry.getCdsServiceJson(theServiceId);
Class<? extends CdsHooksExtension> extensionClass = cdsServicesJson.getExtensionClass();
if (extensionClass == null) {
return null;
}
return myObjectMapper.readValue(theExtension, extensionClass);
}

CdsServiceRequestContextJson deserializeRequestContext(LinkedHashMap<String, Object> theMap)
throws JsonProcessingException {
final CdsServiceRequestContextJson cdsServiceRequestContextJson = new CdsServiceRequestContextJson();
for (Map.Entry<String, Object> entry : theMap.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
// Convert LinkedHashMap entries to Resources
if (value instanceof LinkedHashMap) {
String json = myObjectMapper.writeValueAsString(value);
IBaseResource resource = myParser.parseResource(json);
cdsServiceRequestContextJson.put(key, resource);
} else {
cdsServiceRequestContextJson.put(key, value);
}
}
return cdsServiceRequestContextJson;
}
}
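A minimal wiring sketch for the deserializer above, mirroring the registration CdsServiceRegistryImpl performs in its constructor: install it on an ObjectMapper via a SimpleModule, then read a request. The registry parameter is assumed to be an existing CdsServiceRegistryImpl bean, and the request JSON is whatever the CDS Hooks client POSTs:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceRequestJson;
import ca.uhn.hapi.fhir.cdshooks.serializer.CdsServiceRequestJsonDeserializer;
import ca.uhn.hapi.fhir.cdshooks.svc.CdsServiceRegistryImpl;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class DeserializerWiringSketch {
	public static CdsServiceRequestJson parse(CdsServiceRegistryImpl theRegistry, String theJson) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		SimpleModule module = new SimpleModule();
		// Same registration CdsServiceRegistryImpl performs in its constructor
		module.addDeserializer(
			CdsServiceRequestJson.class,
			new CdsServiceRequestJsonDeserializer(theRegistry, FhirContext.forR4Cached()));
		mapper.registerModule(module);
		// The "extension" node is bound to the extensionClass registered for the request's "hook"
		return mapper.readValue(theJson, CdsServiceRequestJson.class);
	}
}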
@@ -25,6 +25,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.hapi.fhir.cdshooks.api.CdsService;
import ca.uhn.hapi.fhir.cdshooks.api.CdsServiceFeedback;
import ca.uhn.hapi.fhir.cdshooks.api.CdsServicePrefetch;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsHooksExtension;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceJson;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

@@ -85,7 +86,8 @@ public class CdsHooksContextBooter {
cdsServiceJson.setHook(annotation.hook());
cdsServiceJson.setDescription(annotation.description());
cdsServiceJson.setTitle(annotation.title());
cdsServiceJson.setExtension(validateJson(annotation.extension()));
cdsServiceJson.setExtension(serializeExtensions(annotation.extension(), annotation.extensionClass()));
cdsServiceJson.setExtensionClass(annotation.extensionClass());
for (CdsServicePrefetch prefetch : annotation.prefetch()) {
cdsServiceJson.addPrefetch(prefetch.value(), prefetch.query());
cdsServiceJson.addSource(prefetch.value(), prefetch.source());

@@ -104,14 +106,13 @@ public class CdsHooksContextBooter {
}
}

protected String validateJson(String theExtension) {
CdsHooksExtension serializeExtensions(String theExtension, Class<? extends CdsHooksExtension> theClass) {
if (StringUtils.isEmpty(theExtension)) {
return null;
}
try {
final ObjectMapper mapper = new ObjectMapper();
mapper.readTree(theExtension);
return theExtension;
return mapper.readValue(theExtension, theClass);
} catch (JsonProcessingException e) {
final String message = String.format("Invalid JSON: %s", e.getMessage());
ourLog.debug(message);

@@ -131,4 +131,11 @@ public class CdsServiceCache {
}
return result;
}

CdsServiceJson getCdsServiceJson(String theString) {
return myCdsServiceJson.getServices().stream()
.filter(x -> x.getId().equals(theString))
.findFirst()
.orElse(null);
}
}
@@ -20,21 +20,26 @@
package ca.uhn.hapi.fhir.cdshooks.svc;

import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.hapi.fhir.cdshooks.api.ICdsMethod;
import ca.uhn.hapi.fhir.cdshooks.api.ICdsServiceMethod;
import ca.uhn.hapi.fhir.cdshooks.api.ICdsServiceRegistry;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsHooksExtension;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceFeedbackJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceRequestJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServicesJson;
import ca.uhn.hapi.fhir.cdshooks.serializer.CdsServiceRequestJsonDeserializer;
import ca.uhn.hapi.fhir.cdshooks.svc.cr.ICdsCrServiceFactory;
import ca.uhn.hapi.fhir.cdshooks.svc.cr.discovery.ICrDiscoveryServiceFactory;
import ca.uhn.hapi.fhir.cdshooks.svc.prefetch.CdsPrefetchSvc;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.annotations.VisibleForTesting;
import jakarta.annotation.Nonnull;
import jakarta.annotation.PostConstruct;
import org.apache.commons.lang3.Validate;

@@ -59,10 +64,18 @@ public class CdsServiceRegistryImpl implements ICdsServiceRegistry {
CdsPrefetchSvc theCdsPrefetchSvc,
ObjectMapper theObjectMapper,
ICdsCrServiceFactory theCdsCrServiceFactory,
ICrDiscoveryServiceFactory theCrDiscoveryServiceFactory) {
ICrDiscoveryServiceFactory theCrDiscoveryServiceFactory,
FhirContext theFhirContext) {
myCdsHooksContextBooter = theCdsHooksContextBooter;
myCdsPrefetchSvc = theCdsPrefetchSvc;
myObjectMapper = theObjectMapper;
// registering this deserializer here instead of
// CdsHooksObjectMapperFactory to avoid circular
// dependency
SimpleModule module = new SimpleModule();
module.addDeserializer(
CdsServiceRequestJson.class, new CdsServiceRequestJsonDeserializer(this, theFhirContext));
myObjectMapper.registerModule(module);
myCdsCrServiceFactory = theCdsCrServiceFactory;
myCrDiscoveryServiceFactory = theCrDiscoveryServiceFactory;
}

@@ -82,61 +95,14 @@ public class CdsServiceRegistryImpl implements ICdsServiceRegistry {
ICdsServiceMethod serviceMethod = (ICdsServiceMethod) getCdsServiceMethodOrThrowException(theServiceId);
myCdsPrefetchSvc.augmentRequest(theCdsServiceRequestJson, serviceMethod);
Object response = serviceMethod.invoke(myObjectMapper, theCdsServiceRequestJson, theServiceId);

return encodeServiceResponse(theServiceId, response);
}

private CdsServiceResponseJson encodeServiceResponse(String theServiceId, Object result) {
String json;
if (result instanceof String) {
json = (String) result;
} else {
try {
json = myObjectMapper.writeValueAsString(result);
} catch (JsonProcessingException e) {
throw new ConfigurationException(
Msg.code(2389) + "Failed to json serialize Cds service response of type "
+ result.getClass().getName() + " when calling CDS Hook Service " + theServiceId,
e);
}
}
try {
return myObjectMapper.readValue(json, CdsServiceResponseJson.class);
} catch (JsonProcessingException e) {
throw new ConfigurationException(
Msg.code(2390) + "Failed to json deserialize Cds service response of type "
+ result.getClass().getName() + " when calling CDS Hook Service " + theServiceId
+ ". Json: " + json,
e);
}
}

@Nonnull
private ICdsMethod getCdsServiceMethodOrThrowException(String theId) {
ICdsMethod retval = myServiceCache.getServiceMethod(theId);
if (retval == null) {
throw new ResourceNotFoundException(
Msg.code(2391) + "No service with id " + theId + " is registered on this server");
}
return retval;
}

@Nonnull
private ICdsMethod getCdsFeedbackMethodOrThrowException(String theId) {
ICdsMethod retval = myServiceCache.getFeedbackMethod(theId);
if (retval == null) {
throw new ResourceNotFoundException(
Msg.code(2392) + "No feedback service with id " + theId + " is registered on this server");
}
return retval;
}

@Override
public String callFeedback(String theServiceId, CdsServiceFeedbackJson theCdsServiceFeedbackJson) {
public CdsServiceFeedbackJson callFeedback(String theServiceId, CdsServiceFeedbackJson theCdsServiceFeedbackJson) {
ICdsMethod feedbackMethod = getCdsFeedbackMethodOrThrowException(theServiceId);
Object response = feedbackMethod.invoke(myObjectMapper, theCdsServiceFeedbackJson, theServiceId);

return encodeFeedbackResponse(theServiceId, theCdsServiceFeedbackJson, response);
return encodeFeedbackResponse(theServiceId, response);
}

@Override

@@ -146,6 +112,9 @@ public class CdsServiceRegistryImpl implements ICdsServiceRegistry {
CdsServiceJson theCdsServiceJson,
boolean theAllowAutoFhirClientPrefetch,
String theModuleId) {
if (theCdsServiceJson.getExtensionClass() == null) {
theCdsServiceJson.setExtensionClass(CdsHooksExtension.class);
}
myServiceCache.registerDynamicService(
theServiceId, theServiceFunction, theCdsServiceJson, theAllowAutoFhirClientPrefetch, theModuleId);
}

@@ -171,25 +140,100 @@ public class CdsServiceRegistryImpl implements ICdsServiceRegistry {
}
}

private String encodeFeedbackResponse(
String theServiceId, CdsServiceFeedbackJson theCdsServiceFeedbackJson, Object response) {
if (response instanceof String) {
return (String) response;
@Override
public CdsServiceJson getCdsServiceJson(String theServiceId) {
CdsServiceJson cdsServiceJson = myServiceCache.getCdsServiceJson(theServiceId);
if (cdsServiceJson == null) {
throw new IllegalArgumentException(Msg.code(2536) + "No service with " + theServiceId + " is registered.");
}
return cdsServiceJson;
}

@Nonnull
private ICdsMethod getCdsServiceMethodOrThrowException(String theId) {
ICdsMethod retval = myServiceCache.getServiceMethod(theId);
if (retval == null) {
throw new ResourceNotFoundException(
Msg.code(2391) + "No service with id " + theId + " is registered on this server");
}
return retval;
}

@Nonnull
CdsServiceResponseJson encodeServiceResponse(String theServiceId, Object result) {
if (result instanceof String) {
return buildResponseFromString(theServiceId, result, (String) result);
} else {
try {
// Try to json encode the response
return myObjectMapper.writeValueAsString(response);
} catch (JsonProcessingException e) {
try {
ourLog.warn("Failed to deserialize response from {} feedback method", theServiceId, e);
// Just send back what we received
return myObjectMapper.writeValueAsString(theCdsServiceFeedbackJson);
} catch (JsonProcessingException f) {
ourLog.error("Failed to deserialize request parameter to {} feedback method", theServiceId, e);
// Okay then...
return "{}";
}
}
return buildResponseFromImplementation(theServiceId, result);
}
}

@Nonnull
private ICdsMethod getCdsFeedbackMethodOrThrowException(String theId) {
ICdsMethod retval = myServiceCache.getFeedbackMethod(theId);
if (retval == null) {
throw new ResourceNotFoundException(
Msg.code(2392) + "No feedback service with id " + theId + " is registered on this server");
}
return retval;
}

@Nonnull
CdsServiceFeedbackJson encodeFeedbackResponse(String theServiceId, Object theResponse) {
if (theResponse instanceof String) {
return buildFeedbackFromString(theServiceId, (String) theResponse);
} else {
return buildFeedbackFromImplementation(theServiceId, theResponse);
}
}

private CdsServiceResponseJson buildResponseFromImplementation(String theServiceId, Object theResult) {
try {
return (CdsServiceResponseJson) theResult;
} catch (ClassCastException e) {
throw new ConfigurationException(
Msg.code(2389)
+ "Failed to cast Cds service response to CdsServiceResponseJson when calling CDS Hook Service "
+ theServiceId + ". The type "
+ theResult.getClass().getName()
+ " cannot be casted to CdsServiceResponseJson",
e);
}
}

private CdsServiceResponseJson buildResponseFromString(String theServiceId, Object theResult, String theJson) {
try {
return myObjectMapper.readValue(theJson, CdsServiceResponseJson.class);
} catch (JsonProcessingException e) {
throw new ConfigurationException(
Msg.code(2390) + "Failed to json deserialize Cds service response of type "
+ theResult.getClass().getName() + " when calling CDS Hook Service " + theServiceId
+ ". Json: " + theJson,
e);
}
}

private CdsServiceFeedbackJson buildFeedbackFromImplementation(String theServiceId, Object theResponse) {
try {
return (CdsServiceFeedbackJson) theResponse;
} catch (ClassCastException e) {
throw new ClassCastException(
Msg.code(2537) + "Failed to cast feedback response CdsServiceFeedbackJson for service "
+ theServiceId + ". " + e.getMessage());
}
}

private CdsServiceFeedbackJson buildFeedbackFromString(String theServiceId, String theResponse) {
try {
return myObjectMapper.readValue(theResponse, CdsServiceFeedbackJson.class);
} catch (JsonProcessingException e) {
throw new RuntimeException(Msg.code(2538) + "Failed to serialize json Cds Feedback response for service "
+ theServiceId + ". " + e.getMessage());
}
}

@VisibleForTesting
void setServiceCache(CdsServiceCache theCdsServiceCache) {
myServiceCache = theCdsServiceCache;
}
}
@@ -10,6 +10,7 @@ import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseCardJson;
import ca.uhn.hapi.fhir.cdshooks.api.json.CdsServiceResponseJson;
import ca.uhn.hapi.fhir.cdshooks.config.CdsHooksConfig;
import ca.uhn.hapi.fhir.cdshooks.config.TestCdsHooksConfig;
import ca.uhn.hapi.fhir.cdshooks.custom.extensions.model.RequestExtension;
import ca.uhn.hapi.fhir.cdshooks.svc.prefetch.CdsPrefetchFhirClientSvc;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

@@ -21,6 +22,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.web.servlet.MockMvc;

@@ -30,6 +32,7 @@ import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import java.util.UUID;
import java.util.function.Function;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;

@@ -67,7 +70,9 @@ public class CdsHooksControllerTest {

@BeforeEach
public void before() {
myMockMvc = MockMvcBuilders.standaloneSetup(new CdsHooksController(myCdsHooksRegistry)).build();
myMockMvc = MockMvcBuilders.standaloneSetup(new CdsHooksController(myCdsHooksRegistry))
.setMessageConverters(new MappingJackson2HttpMessageConverter(myObjectMapper))
.build();
}

@Test

@@ -82,25 +87,29 @@ public class CdsHooksControllerTest {
.andExpect(jsonPath("services[2].title").value(GreeterCdsService.TEST_HOOK_TITLE))
.andExpect(jsonPath("services[2].id").value(GreeterCdsService.TEST_HOOK_STRING_ID))
.andExpect(jsonPath("services[2].prefetch." + GreeterCdsService.TEST_HOOK_PREFETCH_USER_KEY).value(GreeterCdsService.TEST_HOOK_PREFETCH_USER_QUERY))
.andExpect(jsonPath("services[2].prefetch." + GreeterCdsService.TEST_HOOK_PREFETCH_PATIENT_KEY).value(GreeterCdsService.TEST_HOOK_PREFETCH_PATIENT_QUERY));
.andExpect(jsonPath("services[2].prefetch." + GreeterCdsService.TEST_HOOK_PREFETCH_PATIENT_KEY).value(GreeterCdsService.TEST_HOOK_PREFETCH_PATIENT_QUERY))
.andExpect(jsonPath("services[5].extension.example-client-conformance").value("http://hooks.example.org/fhir/102/Conformance/patientview"));
}

@Test
void testExampleFeedback() throws Exception {
CdsServiceFeedbackJson request = new CdsServiceFeedbackJson();
// setup
final CdsServiceFeedbackJson request = new CdsServiceFeedbackJson();
request.setCard(TEST_HOOK_INSTANCE);
request.setOutcome(CdsServiceFeebackOutcomeEnum.accepted);
request.setOutcomeTimestamp(OUTCOME_TIMESTAMP);

String requestBody = myObjectMapper.writeValueAsString(request);

myMockMvc
final String requestBody = myObjectMapper.writeValueAsString(request);
// execute
final MvcResult actual = myMockMvc
.perform(post(CdsHooksController.BASE + "/example-service/feedback").contentType(MediaType.APPLICATION_JSON).content(requestBody))
.andDo(print())
.andExpect(status().is2xxSuccessful())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("message").value("Thank you for your feedback dated " + OUTCOME_TIMESTAMP + "!"))
;
.andReturn();
// validate
final CdsServiceFeedbackJson cdsServiceFeedbackJson = myObjectMapper.readValue(actual.getResponse().getContentAsString(), CdsServiceFeedbackJson.class);
assertThat(cdsServiceFeedbackJson.getAcceptedSuggestions()).hasSize(1);
assertThat(cdsServiceFeedbackJson).usingRecursiveComparison().ignoringFields("myAcceptedSuggestions").isEqualTo(request);
}

@Test

@@ -127,8 +136,14 @@ public class CdsHooksControllerTest {

@Test
void testCallHelloUniverse() throws Exception {
RequestExtension requestExtension = new RequestExtension();
requestExtension.setConfigItem("request-config-item");

CdsServiceRequestJson request = new CdsServiceRequestJson();
request.setExtension(requestExtension);
request.setFhirServer(TEST_FHIR_SERVER);
request.setHook(HelloWorldService.TEST_HOOK_UNIVERSE_ID);

String requestBody = myObjectMapper.writeValueAsString(request);

@@ -139,7 +154,7 @@ public class CdsHooksControllerTest {
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("cards[0].summary").value("Hello Universe!"))
.andExpect(jsonPath("cards[0].indicator").value("critical"))
;
.andExpect(jsonPath("cards[0].extension.example-config-item").value("request-config-item"));
}

@Test

@@ -163,21 +178,24 @@ public class CdsHooksControllerTest {

@Test
void testHelloWorldFeedback() throws Exception {
CdsServiceFeedbackJson request = new CdsServiceFeedbackJson();
// setup
final CdsServiceFeedbackJson request = new CdsServiceFeedbackJson();
request.setCard(TEST_HOOK_INSTANCE);
request.setOutcome(CdsServiceFeebackOutcomeEnum.accepted);
request.setOutcomeTimestamp(OUTCOME_TIMESTAMP);

String requestBody = myObjectMapper.writeValueAsString(request);

TestServerAppCtx.ourHelloWorldService.setExpectedCount(1);
myMockMvc
// execute
MvcResult actual = myMockMvc
.perform(post(CdsHooksController.BASE + "/" + HelloWorldService.TEST_HOOK_WORLD_ID + "/feedback").contentType(MediaType.APPLICATION_JSON).content(requestBody))
.andDo(print())
.andExpect(status().is2xxSuccessful())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("message").value("Thank you for your feedback dated " + OUTCOME_TIMESTAMP + "!"))
;
.andReturn();
// validate
final CdsServiceFeedbackJson cdsServiceFeedbackJson = myObjectMapper.readValue(actual.getResponse().getContentAsString(), CdsServiceFeedbackJson.class);
assertThat(cdsServiceFeedbackJson.getAcceptedSuggestions()).hasSize(1);
assertThat(cdsServiceFeedbackJson).usingRecursiveComparison().ignoringFields("myAcceptedSuggestions").isEqualTo(request);
TestServerAppCtx.ourHelloWorldService.awaitExpected();
}