Merge remote-tracking branch 'origin/master' into issue-2401-add-support-for-group-id-export
Commit 30905aeba7

@@ -37,31 +37,7 @@
</signature>
</configuration>
</execution>
<!--
<execution>
<id>check-android-api</id>
<phase>test</phase>
<inherited>true</inherited>
<goals>
<goal>check</goal>
</goals>
<configuration>
<signature>
<groupId>net.sf.androidscents.signature</groupId>
<artifactId>android-api-level-21</artifactId>
<version>5.0.1_r2</version>
</signature>
</configuration>
</execution>
-->
</executions>
<dependencies>
<dependency>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-all</artifactId>
<version>5.0.4</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.basepom.maven</groupId>
@@ -226,6 +226,11 @@ public class RequestPartitionId {
return fromPartitionIds(Collections.singletonList(null));
}

@Nonnull
public static RequestPartitionId defaultPartition(@Nullable LocalDate thePartitionDate) {
return fromPartitionIds(Collections.singletonList(null), thePartitionDate);
}

@Nonnull
public static RequestPartitionId fromPartitionId(@Nullable Integer thePartitionId) {
return fromPartitionIds(Collections.singletonList(thePartitionId));

@@ -238,7 +243,12 @@ public class RequestPartitionId {

@Nonnull
public static RequestPartitionId fromPartitionIds(@Nonnull Collection<Integer> thePartitionIds) {
return new RequestPartitionId(null, toListOrNull(thePartitionIds), null);
return fromPartitionIds(thePartitionIds, null);
}

@Nonnull
public static RequestPartitionId fromPartitionIds(@Nonnull Collection<Integer> thePartitionIds, @Nullable LocalDate thePartitionDate) {
return new RequestPartitionId(null, toListOrNull(thePartitionIds), thePartitionDate);
}

@Nonnull
@@ -78,4 +78,31 @@ public class StringUtil {
return new String(bytes, StandardCharsets.UTF_8);
}

/**
* Gets the string prefix of the specified length.
*
* @param theString
* String to get the prefix from
* @param theCodePointCount
* Length of the prefix in code points
* @return
* Returns the string prefix of the specified number of codepoints.
*/
public static String left(String theString, int theCodePointCount) {
if (theString == null) {
return null;
}

if (theCodePointCount < 0) {
return "";
}

// char count can only be bigger than the code point count
if (theString.length() <= theCodePointCount) {
return theString;
}

return theString.substring(0, theString.offsetByCodePoints(0, theCodePointCount));
}

}
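
The code-point handling above is the point of the new helper: a plain `substring` over char indices can split a surrogate pair, which is the failure mode behind the hash-calculation errors this change fixes. A small standalone illustration (not part of the diff), using the same supplementary characters as the test further down:

```java
public class CodePointPrefixDemo {
	public static void main(String[] args) {
		// Two supplementary code points, each stored as a surrogate pair (four chars in total)
		String s = "\uD800\uDF01\uD800\uDF02";

		// Char-based prefix: cuts through the first surrogate pair, leaving a lone high surrogate
		String broken = s.substring(0, 1);

		// Code-point-aware prefix (what StringUtil.left does): keeps the first code point whole
		String ok = s.substring(0, s.offsetByCodePoints(0, 1));

		System.out.println(broken.length()); // 1 - not a valid character on its own
		System.out.println(ok.length());     // 2 - a complete surrogate pair
	}
}
```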
@@ -164,7 +164,7 @@ ca.uhn.fhir.jpa.patch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON pat

ca.uhn.fhir.jpa.graphql.JpaStorageServices.invalidGraphqlArgument=Unknown GraphQL argument "{0}". Value GraphQL argument for this type are: {1}

ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc.blacklistedResourceTypeForPartitioning=Resource type {0} can not be partitioned
ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc.nonDefaultPartitionSelectedForNonPartitionable=Resource type {0} can not be partitioned
ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc.unknownPartitionId=Unknown partition ID: {0}
ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc.unknownPartitionName=Unknown partition name: {0}

@@ -10,8 +10,21 @@ import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;

public class StringUtilTest {

@Test
public void testLeft() {
assertNull(StringUtil.left(null, 1));
assertEquals("", StringUtil.left("", 10));
assertEquals("STR", StringUtil.left("STR", 10));
assertEquals(".", StringUtil.left("...", 1));

// check supplementary chars
assertEquals("\uD800\uDF01", StringUtil.left("\uD800\uDF01\uD800\uDF02", 1));
}

@Test
public void testNormalizeString() {
assertEquals("TEST TEST", StringUtil.normalizeStringForSearchIndexing("TEST teSt"));

@@ -0,0 +1,4 @@
---
type: fix
issue: 2404
title: "Supplementary characters in the parameters might cause errors when calculating hash values"

@@ -0,0 +1,6 @@
---
type: add
issue: 2407
title: "When using the JPA server in partitioned mode with a partition interceptor, the interceptor is now called even for
resource types that can not be placed in a non-default partition (e.g. SearchParameter, CodeSystem, etc.). The interceptor
may return null or default in this case, but can include a non-null partition date if needed."

@@ -51,7 +51,7 @@ Here is an example of a full HAPI MDM rules json document:
{
"name": "firstname-meta",
"resourceType": "Patient",
"resourcePath": "name.given",
"fhirPath": "name.given.first()",
"matcher": {
"algorithm": "METAPHONE"
}

@@ -196,6 +196,53 @@ Here is a matcher matchField that only matches when two family names are identic
}
```

While it is often suitable to use the `resourcePath` field to indicate the location of the data to be matched, occasionally you will need more direct control over precisely which fields are matched. When performing string matching, the matcher will indiscriminately try to match all elements of the left resource to all elements of the right resource. For example, consider the following two patients and matcher.

```json
{
"resourceType": "Patient",
"name": [{
"given": ["Frank", "John"]
}]
}
```

```json
{
"resourceType": "Patient",
"name": [{
"given": ["John", "Frank"]
}]
}
```

```json
{
"name": "firstname-meta",
"resourceType": "Patient",
"resourcePath": "name.given",
"matcher": {
"algorithm": "METAPHONE"
}
}
```

In this example, these two patients would match, as the matcher will compare all elements of `["John", "Frank"]` to all elements of `["Frank", "John"]` and find that there are matches. This is when you would want to use a FHIRPath matcher, as FHIRPath expressions give you more direct control. The following example shows a matcher that would prevent these two patients from matching each other.

```json
{
"name": "firstname-meta-fhirpath",
"resourceType": "Patient",
"fhirPath": "name.given[0]",
"matcher": {
"algorithm": "METAPHONE"
}
}
```
Since FHIRPath expressions support indexing, it is possible to indicate directly that you would like to compare only the first element of each resource.


Special identifier matching is also available if you need to match on a particular identifier system:
```json
{
@@ -82,6 +82,25 @@ A hook against the [`Pointcut.STORAGE_PARTITION_IDENTIFY_READ`](/hapi-fhir/apido

As of HAPI FHIR 5.3.0, the *Identify Partition for Read* hook method may return multiple partition names or IDs. If more than one partition is identified, the server will search in all identified partitions.
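
A read hook returning more than one partition might look like the minimal sketch below. The header name, the tenant names, and the `fromPartitionNames(...)` varargs factory used here are assumptions to illustrate the idea, not code taken from this changeset:

```java
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.server.RequestDetails;

@Interceptor
public class MultiPartitionReadInterceptor {

	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
	public RequestPartitionId identifyForRead(RequestDetails theRequestDetails) {
		// Illustrative: a hypothetical header that asks for a cross-tenant search
		if ("true".equals(theRequestDetails.getHeader("X-Search-All-Tenants"))) {
			return RequestPartitionId.allPartitions();
		}
		// As of 5.3.0 the hook may name several partitions; the search spans all of them
		return RequestPartitionId.fromPartitionNames("TENANT-A", "TENANT-B");
	}
}
```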

## Non-Partitionable Resources

Some resource types can not be placed in any partition other than the DEFAULT partition. When a resource of one of these types is being created, the *STORAGE_PARTITION_IDENTIFY_CREATE* pointcut is invoked, but the hook method must return [defaultPartition()](https://hapifhir.io/hapi-fhir/apidocs/hapi-fhir-base/ca/uhn/fhir/interceptor/model/RequestPartitionId.html#defaultPartition()). A partition date may optionally be included.

The following resource types may not be placed in any partition except the default partition:

* CapabilityStatement
* CodeSystem
* CompartmentDefinition
* ConceptMap
* NamingSystem
* OperationDefinition
* Questionnaire
* SearchParameter
* StructureDefinition
* StructureMap
* Subscription
* ValueSet

## Examples

See [Partition Interceptor Examples](./partition_interceptor_examples.html) for various samples of how partitioning interceptors can be set up.
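
A minimal sketch of a create-partition hook that routes ordinary resources to a tenant partition while sending the non-partitionable types listed above to the default partition with a partition date. The tenant name, the specific type checks, and the locally created `FhirContext` are illustrative assumptions rather than code from this changeset:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.time.LocalDate;

@Interceptor
public class TenantPartitionInterceptor {

	private final FhirContext myFhirContext = FhirContext.forR4();

	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE)
	public RequestPartitionId identifyForCreate(IBaseResource theResource) {
		String resourceType = myFhirContext.getResourceType(theResource);

		// Non-partitionable types must end up in the default partition;
		// a partition date may still be attached to the row.
		if ("SearchParameter".equals(resourceType) || "CodeSystem".equals(resourceType)) {
			return RequestPartitionId.defaultPartition(LocalDate.now());
		}

		// Everything else goes to a fixed (illustrative) tenant partition
		return RequestPartitionId.fromPartitionName("TENANT-A");
	}
}
```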
@@ -10,7 +10,7 @@ The **RepositoryValidatingInterceptor** interceptor can be used to easily add th

# Benefits and Limitations

For a HAPI FHIR JPA Server, the RepositoryValidatingInterceptor is a very powerful option compared to the [Request and Response Validation](/docs/interceptors/built_in_server_interceptors.html#request_and_response_validation) that is also often used for validation.
For a HAPI FHIR JPA Server, the RepositoryValidatingInterceptor is a very powerful addition to the [Request and Response Validation](/docs/interceptors/built_in_server_interceptors.html#request_and_response_validation) that is also often used for validation.

## Request and Response Validation


@@ -20,6 +20,8 @@ The *Request and Response Validation* interceptors examine incoming HTTP payload

* It may miss validating data that is added or modified through other interceptors

* It may provide you with a validated resource in your Java API so that you can make certain reasonable assumptions - e.g. a required field does not need a null check.

* It is not able to validate changes coming from operations such as FHIR Patch, since the patch itself may pass validation, but may ultimately result in modifying a resource so that it is no longer valid.

## Repository Validation

@@ -32,6 +34,8 @@ This means that:

* Repository validation requires pointcuts that are thrown directly by the storage engine, meaning that it can not be used from a plain server unless the plain server code manually invokes the same pointcuts.

* Repository validation does *NOT* provide your custom pre-storage business logic layer with any guarantees about the profile, as the resource has not been hit by the proper pointcut. This means that you cannot make reasonable profile assumptions in your pre-storage logic handling the resource.

# Using the Repository Validating Interceptor

Using the repository validating interceptor is as simple as creating a new instance of [RepositoryValidatingInterceptor](/hapi-fhir/apidocs/hapi-fhir-jpaserver-base/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingInterceptor.html) and registering it with the interceptor registry. The only tricky part is initializing your rules, which must be done using a [RepositoryValidatingRuleBuilder](/hapi-fhir/apidocs/hapi-fhir-jpaserver-base/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingRuleBuilder.html).
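
A rough sketch of that wiring under a Spring context follows. The profile URL and the surrounding class are illustrative, and the builder and interceptor method names follow the rule-builder API linked above from memory, so they should be checked against the current Javadoc:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.interceptor.validation.IRepositoryValidatingRule;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingInterceptor;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
import org.springframework.context.ApplicationContext;

import java.util.List;

public class RepositoryValidationSetup {

	public RepositoryValidatingInterceptor buildInterceptor(ApplicationContext theAppContext, FhirContext theFhirContext) {
		// The rule builder is obtained from the Spring context rather than constructed directly
		RepositoryValidatingRuleBuilder ruleBuilder = theAppContext.getBean(RepositoryValidatingRuleBuilder.class);

		// Illustrative rule: Patient resources must declare (at least) this profile
		ruleBuilder
			.forResourcesOfType("Patient")
			.requireAtLeastProfile("http://example.org/fhir/StructureDefinition/my-patient");

		List<IRepositoryValidatingRule> rules = ruleBuilder.build();
		return new RepositoryValidatingInterceptor(theFhirContext, rules);
	}
}
```

The resulting interceptor is then registered with the server's interceptor registry like any other interceptor.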
@ -50,7 +50,7 @@ import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.hasHooks;
|
|||
|
||||
public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
|
||||
|
||||
private final HashSet<Object> myPartitioningBlacklist;
|
||||
private final HashSet<Object> myNonPartitionableResourceNames;
|
||||
|
||||
@Autowired
|
||||
private IInterceptorBroadcaster myInterceptorBroadcaster;
|
||||
|
@ -62,25 +62,25 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
|
|||
private PartitionSettings myPartitionSettings;
|
||||
|
||||
public RequestPartitionHelperSvc() {
|
||||
myPartitioningBlacklist = new HashSet<>();
|
||||
myNonPartitionableResourceNames = new HashSet<>();
|
||||
|
||||
// Infrastructure
|
||||
myPartitioningBlacklist.add("Subscription");
|
||||
myPartitioningBlacklist.add("SearchParameter");
|
||||
myNonPartitionableResourceNames.add("Subscription");
|
||||
myNonPartitionableResourceNames.add("SearchParameter");
|
||||
|
||||
// Validation and Conformance
|
||||
myPartitioningBlacklist.add("StructureDefinition");
|
||||
myPartitioningBlacklist.add("Questionnaire");
|
||||
myPartitioningBlacklist.add("CapabilityStatement");
|
||||
myPartitioningBlacklist.add("CompartmentDefinition");
|
||||
myPartitioningBlacklist.add("OperationDefinition");
|
||||
myNonPartitionableResourceNames.add("StructureDefinition");
|
||||
myNonPartitionableResourceNames.add("Questionnaire");
|
||||
myNonPartitionableResourceNames.add("CapabilityStatement");
|
||||
myNonPartitionableResourceNames.add("CompartmentDefinition");
|
||||
myNonPartitionableResourceNames.add("OperationDefinition");
|
||||
|
||||
// Terminology
|
||||
myPartitioningBlacklist.add("ConceptMap");
|
||||
myPartitioningBlacklist.add("CodeSystem");
|
||||
myPartitioningBlacklist.add("ValueSet");
|
||||
myPartitioningBlacklist.add("NamingSystem");
|
||||
myPartitioningBlacklist.add("StructureMap");
|
||||
myNonPartitionableResourceNames.add("ConceptMap");
|
||||
myNonPartitionableResourceNames.add("CodeSystem");
|
||||
myNonPartitionableResourceNames.add("ValueSet");
|
||||
myNonPartitionableResourceNames.add("NamingSystem");
|
||||
myNonPartitionableResourceNames.add("StructureMap");
|
||||
|
||||
}
|
||||
|
||||
|
@ -97,7 +97,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
|
|||
|
||||
if (myPartitionSettings.isPartitioningEnabled()) {
|
||||
// Handle system requests
|
||||
if ((theRequest == null && myPartitioningBlacklist.contains(theResourceType))) {
|
||||
if ((theRequest == null && myNonPartitionableResourceNames.contains(theResourceType))) {
|
||||
return RequestPartitionId.defaultPartition();
|
||||
}
|
||||
|
||||
|
@ -128,10 +128,6 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
|
|||
RequestPartitionId requestPartitionId;
|
||||
|
||||
if (myPartitionSettings.isPartitioningEnabled()) {
|
||||
// Handle system requests
|
||||
if ((theRequest == null && myPartitioningBlacklist.contains(theResourceType))) {
|
||||
return RequestPartitionId.defaultPartition();
|
||||
}
|
||||
|
||||
// Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE
|
||||
HookParams params = new HookParams()
|
||||
|
@ -140,6 +136,12 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
|
|||
.addIfMatchesType(ServletRequestDetails.class, theRequest);
|
||||
requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params);
|
||||
|
||||
// Handle system requests
|
||||
boolean nonPartitionableResource = myNonPartitionableResourceNames.contains(theResourceType);
|
||||
if (nonPartitionableResource && requestPartitionId == null) {
|
||||
requestPartitionId = RequestPartitionId.defaultPartition();
|
||||
}
|
||||
|
||||
String resourceName = myFhirContext.getResourceType(theResource);
|
||||
validateSinglePartitionForCreate(requestPartitionId, resourceName, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE);
|
||||
|
||||
|
@ -271,8 +273,8 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
|
|||
if ((theRequestPartitionId.hasPartitionIds() && !theRequestPartitionId.getPartitionIds().contains(null)) ||
|
||||
(theRequestPartitionId.hasPartitionNames() && !theRequestPartitionId.getPartitionNames().contains(JpaConstants.DEFAULT_PARTITION_NAME))) {
|
||||
|
||||
if (myPartitioningBlacklist.contains(theResourceName)) {
|
||||
String msg = myFhirContext.getLocalizer().getMessageSanitized(RequestPartitionHelperSvc.class, "blacklistedResourceTypeForPartitioning", theResourceName);
|
||||
if (myNonPartitionableResourceNames.contains(theResourceName)) {
|
||||
String msg = myFhirContext.getLocalizer().getMessageSanitized(RequestPartitionHelperSvc.class, "nonDefaultPartitionSelectedForNonPartitionable", theResourceName);
|
||||
throw new UnprocessableEntityException(msg);
|
||||
}
|
||||
|
||||
|
|
|
@ -10,10 +10,13 @@ import ca.uhn.fhir.jpa.entity.PartitionEntity;
|
|||
import ca.uhn.fhir.jpa.interceptor.ex.PartitionInterceptorReadAllPartitions;
|
||||
import ca.uhn.fhir.jpa.interceptor.ex.PartitionInterceptorReadPartitionsBasedOnScopes;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
@ -21,6 +24,7 @@ import org.apache.commons.lang3.Validate;
|
|||
import org.hamcrest.Matchers;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.StructureDefinition;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
@ -82,6 +86,51 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest {
|
|||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testCreateNonPartionableResourceWithPartitionDate() {
|
||||
myPartitionInterceptor.addCreatePartition(RequestPartitionId.defaultPartition(LocalDate.of(2021, 2, 22)));
|
||||
|
||||
StructureDefinition sd = new StructureDefinition();
|
||||
sd.setUrl("http://foo");
|
||||
myStructureDefinitionDao.create(sd);
|
||||
|
||||
runInTransaction(()->{
|
||||
List<ResourceTable> resources = myResourceTableDao.findAll();
|
||||
assertEquals(1, resources.size());
|
||||
assertEquals(null, resources.get(0).getPartitionId().getPartitionId());
|
||||
assertEquals(22, resources.get(0).getPartitionId().getPartitionDate().getDayOfMonth());
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateNonPartionableResourceWithNullPartitionReturned() {
|
||||
myPartitionInterceptor.addCreatePartition(null);
|
||||
|
||||
StructureDefinition sd = new StructureDefinition();
|
||||
sd.setUrl("http://foo");
|
||||
myStructureDefinitionDao.create(sd);
|
||||
|
||||
runInTransaction(()->{
|
||||
List<ResourceTable> resources = myResourceTableDao.findAll();
|
||||
assertEquals(1, resources.size());
|
||||
assertEquals(null, resources.get(0).getPartitionId());
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateNonPartionableResourceWithDisallowedPartitionReturned() {
|
||||
myPartitionInterceptor.addCreatePartition(RequestPartitionId.fromPartitionName("FOO"));
|
||||
|
||||
StructureDefinition sd = new StructureDefinition();
|
||||
sd.setUrl("http://foo");
|
||||
try {
|
||||
myStructureDefinitionDao.create(sd);
|
||||
fail();
|
||||
} catch (UnprocessableEntityException e) {
|
||||
assertEquals("Resource type StructureDefinition can not be partitioned", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Should fail if no interceptor is registered for the READ pointcut
|
||||
*/
|
||||
|
|
|
@ -118,6 +118,21 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te
|
|||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testCreateAndRead_NonPartitionableResource_DefaultTenant() {
|
||||
|
||||
// Create patients
|
||||
|
||||
IIdType idA = createResource("NamingSystem", withTenant(JpaConstants.DEFAULT_PARTITION_NAME), withStatus("draft"));
|
||||
|
||||
runInTransaction(() -> {
|
||||
ResourceTable resourceTable = myResourceTableDao.findById(idA.getIdPartAsLong()).orElseThrow(() -> new IllegalStateException());
|
||||
assertNull(resourceTable.getPartitionId());
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testCreate_InvalidTenant() {
|
||||
|
||||
|
|
|
@ -73,10 +73,18 @@ public class BaseCqlDstu3Test extends BaseJpaDstu3Test implements CqlProviderTes
|
|||
return count;
|
||||
}
|
||||
|
||||
protected Bundle loadBundle(String theLocation) throws IOException {
|
||||
protected Bundle parseBundle(String theLocation) throws IOException {
|
||||
String json = stringFromResource(theLocation);
|
||||
Bundle bundle = (Bundle) myFhirContext.newJsonParser().parseResource(json);
|
||||
Bundle result = (Bundle) mySystemDao.transaction(null, bundle);
|
||||
return result;
|
||||
return bundle;
|
||||
}
|
||||
|
||||
protected Bundle loadBundle(Bundle bundle) {
|
||||
return (Bundle) mySystemDao.transaction(null, bundle);
|
||||
}
|
||||
|
||||
protected Bundle loadBundle(String theLocation) throws IOException {
|
||||
Bundle bundle = parseBundle(theLocation);
|
||||
return loadBundle(bundle);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,8 +8,11 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
|||
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
|
||||
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.util.BundleUtil;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.MeasureReport;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -21,6 +24,7 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
|
|||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ContextConfiguration(classes = {CqlR4Config.class, TestCqlConfig.class, SubscriptionProcessorConfig.class})
|
||||
|
@ -72,11 +76,18 @@ public class BaseCqlR4Test extends BaseJpaR4Test implements CqlProviderTestBase
|
|||
return count;
|
||||
}
|
||||
|
||||
protected Bundle loadBundle(String theLocation) throws IOException {
|
||||
protected Bundle parseBundle(String theLocation) throws IOException {
|
||||
String json = stringFromResource(theLocation);
|
||||
Bundle bundle = (Bundle) myFhirContext.newJsonParser().parseResource(json);
|
||||
Bundle result = (Bundle) mySystemDao.transaction(null, bundle);
|
||||
return result;
|
||||
return bundle;
|
||||
}
|
||||
|
||||
protected Bundle loadBundle(Bundle bundle) {
|
||||
return (Bundle) mySystemDao.transaction(null, bundle);
|
||||
}
|
||||
|
||||
protected Bundle loadBundle(String theLocation) throws IOException {
|
||||
Bundle bundle = parseBundle(theLocation);
|
||||
return loadBundle(bundle);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,144 @@
|
|||
package ca.uhn.fhir.cql.dstu3;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.math.BigDecimal;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.hamcrest.Matchers;
|
||||
import org.hl7.fhir.dstu3.model.Bundle;
|
||||
import org.hl7.fhir.dstu3.model.DateTimeType;
|
||||
import org.hl7.fhir.dstu3.model.IdType;
|
||||
import org.hl7.fhir.dstu3.model.Measure;
|
||||
import org.hl7.fhir.dstu3.model.MeasureReport;
|
||||
import org.hl7.fhir.dstu3.model.MeasureReport.MeasureReportGroupComponent;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import ca.uhn.fhir.cql.BaseCqlDstu3Test;
|
||||
import ca.uhn.fhir.cql.dstu3.provider.MeasureOperationsProvider;
|
||||
import ca.uhn.fhir.util.BundleUtil;
|
||||
|
||||
public class CqlMeasureEvaluationDstu3Test extends BaseCqlDstu3Test {
|
||||
Logger ourLog = LoggerFactory.getLogger(CqlMeasureEvaluationDstu3Test.class);
|
||||
|
||||
@Autowired
|
||||
MeasureOperationsProvider myMeasureOperationsProvider;
|
||||
|
||||
protected void testMeasureBundle(String theLocation) throws IOException {
|
||||
Bundle bundle = parseBundle(theLocation);
|
||||
loadBundle(bundle);
|
||||
|
||||
List<Measure> measures = BundleUtil.toListOfResourcesOfType(myFhirContext, bundle, Measure.class);
|
||||
if (measures == null || measures.isEmpty()) {
|
||||
throw new IllegalArgumentException(String.format("No measures found for Bundle %s", theLocation));
|
||||
}
|
||||
|
||||
List<MeasureReport> reports = BundleUtil.toListOfResourcesOfType(myFhirContext, bundle, MeasureReport.class);
|
||||
if (reports == null || reports.isEmpty()) {
|
||||
throw new IllegalArgumentException(String.format("No measure reports found for Bundle %s", theLocation));
|
||||
}
|
||||
|
||||
for (MeasureReport report : reports) {
|
||||
testMeasureReport(report);
|
||||
}
|
||||
}
|
||||
|
||||
protected void testMeasureReport(MeasureReport expected) {
|
||||
String measureId = this.getMeasureId(expected);
|
||||
String patientId = this.getPatientId(expected);
|
||||
String periodStart = this.getPeriodStart(expected);
|
||||
String periodEnd = this.getPeriodEnd(expected);
|
||||
|
||||
this.ourLog.info("Measure: %s, Patient: %s, Start: %s, End: %s", measureId, patientId, periodStart, periodEnd);
|
||||
|
||||
MeasureReport actual = this.myMeasureOperationsProvider.evaluateMeasure(new IdType("Measure", measureId),
|
||||
periodStart, periodEnd, null,
|
||||
// TODO: These are all individual reports
|
||||
"patient", patientId,
|
||||
// TODO: Generalize these parameters into a Parameters resource
|
||||
null, null, null, null, null, null);
|
||||
|
||||
compareMeasureReport(expected, actual);
|
||||
}
|
||||
|
||||
protected void compareMeasureReport(MeasureReport expected, MeasureReport actual) {
|
||||
assertNotNull("expected MeasureReport can not be null", expected);
|
||||
assertNotNull("actual MeasureReport can not be null", actual);
|
||||
|
||||
String errorLocator = String.format("Measure: %s, Subject: %s", expected.getMeasure(),
|
||||
expected.getPatient().getReference());
|
||||
|
||||
assertEquals(expected.hasGroup(), actual.hasGroup(), errorLocator);
|
||||
assertEquals(expected.getGroup().size(), actual.getGroup().size(), errorLocator);
|
||||
|
||||
for (MeasureReportGroupComponent mrgcExpected : expected.getGroup()) {
|
||||
Optional<MeasureReportGroupComponent> mrgcActualOptional = actual.getGroup().stream()
|
||||
.filter(x -> x.getId().equals(mrgcExpected.getId())).findFirst();
|
||||
|
||||
errorLocator = String.format("Measure: %s, Subject: %s, Group: %s", expected.getMeasure(),
|
||||
expected.getPatient().getReference(), mrgcExpected.getId());
|
||||
assertTrue(errorLocator, mrgcActualOptional.isPresent());
|
||||
|
||||
MeasureReportGroupComponent mrgcActual = mrgcActualOptional.get();
|
||||
|
||||
if (mrgcExpected.getMeasureScore() == null) {
|
||||
assertNull(mrgcActual.getMeasureScore(), errorLocator);
|
||||
} else {
|
||||
assertNotNull(mrgcActual.getMeasureScore());
|
||||
BigDecimal decimalExpected = mrgcExpected.getMeasureScore();
|
||||
BigDecimal decimalActual = mrgcActual.getMeasureScore();
|
||||
|
||||
assertThat(errorLocator, decimalActual, Matchers.comparesEqualTo(decimalExpected));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public String getPatientId(MeasureReport measureReport) {
|
||||
String[] subjectRefParts = measureReport.getPatient().getReference().split("/");
|
||||
String patientId = subjectRefParts[subjectRefParts.length - 1];
|
||||
return patientId;
|
||||
}
|
||||
|
||||
public String getMeasureId(MeasureReport measureReport) {
|
||||
String[] measureRefParts = measureReport.getMeasure().getReference().split("/");
|
||||
String measureId = measureRefParts[measureRefParts.length - 1];
|
||||
return measureId;
|
||||
}
|
||||
|
||||
public String getPeriodStart(MeasureReport measureReport) {
|
||||
Date periodStart = measureReport.getPeriod().getStart();
|
||||
if (periodStart != null) {
|
||||
return toDateString(periodStart);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public String getPeriodEnd(MeasureReport measureReport) {
|
||||
Date periodEnd = measureReport.getPeriod().getEnd();
|
||||
if (periodEnd != null) {
|
||||
return toDateString(periodEnd);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public String toDateString(Date date) {
|
||||
return new DateTimeType(date).getValueAsString();
|
||||
}
|
||||
|
||||
// As of 2/11/2021, all the DSTU3 bundles in the Connectathon IG are out of date
|
||||
// and can't be posted
|
||||
// @Test
|
||||
// public void test_EXM117_83000() throws IOException {
|
||||
// this.testMeasureBundle("dstu3/connectathon/EXM117_FHIR3-8.3.000-bundle.json");
|
||||
// }
|
||||
}
|
|
@ -1,54 +1,208 @@
|
|||
package ca.uhn.fhir.cql.r4;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.math.BigDecimal;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.hamcrest.Matchers;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.DateTimeType;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.Measure;
|
||||
import org.hl7.fhir.r4.model.MeasureReport;
|
||||
import org.hl7.fhir.r4.model.MeasureReport.MeasureReportGroupComponent;
|
||||
import org.hl7.fhir.r4.model.Quantity;
|
||||
import org.hl7.fhir.r4.model.MeasureReport.MeasureReportGroupPopulationComponent;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import ca.uhn.fhir.cql.BaseCqlR4Test;
|
||||
import ca.uhn.fhir.cql.common.provider.CqlProviderTestBase;
|
||||
import ca.uhn.fhir.cql.r4.provider.MeasureOperationsProvider;
|
||||
import ca.uhn.fhir.util.BundleUtil;
|
||||
|
||||
public class CqlMeasureEvaluationR4Test extends BaseCqlR4Test implements CqlProviderTestBase {
|
||||
|
||||
private static final IdType measureId = new IdType("Measure", "measure-EXM130-7.3.000");
|
||||
private static final String periodStart = "2019-01-01";
|
||||
private static final String periodEnd = "2019-12-31";
|
||||
public class CqlMeasureEvaluationR4Test extends BaseCqlR4Test {
|
||||
Logger ourLog = LoggerFactory.getLogger(CqlMeasureEvaluationR4Test.class);
|
||||
|
||||
@Autowired
|
||||
MeasureOperationsProvider myMeasureOperationsProvider;
|
||||
|
||||
public void loadBundles() throws IOException {
|
||||
loadBundle("r4/connectathon/EXM130-7.3.000-bundle.json");
|
||||
protected void testMeasureBundle(String theLocation) throws IOException {
|
||||
Bundle bundle = parseBundle(theLocation);
|
||||
loadBundle(bundle);
|
||||
|
||||
List<Measure> measures = BundleUtil.toListOfResourcesOfType(myFhirContext, bundle, Measure.class);
|
||||
if (measures == null || measures.isEmpty()) {
|
||||
throw new IllegalArgumentException(String.format("No measures found for Bundle %s", theLocation));
|
||||
}
|
||||
|
||||
List<MeasureReport> reports = BundleUtil.toListOfResourcesOfType(myFhirContext, bundle, MeasureReport.class);
|
||||
if (reports == null || reports.isEmpty()) {
|
||||
throw new IllegalArgumentException(String.format("No measure reports found for Bundle %s", theLocation));
|
||||
}
|
||||
|
||||
for (MeasureReport report : reports) {
|
||||
testMeasureReport(report);
|
||||
}
|
||||
}
|
||||
|
||||
protected void testMeasureReport(MeasureReport expected) {
|
||||
String measureId = this.getMeasureId(expected);
|
||||
String patientId = this.getPatientId(expected);
|
||||
String periodStart = this.getPeriodStart(expected);
|
||||
String periodEnd = this.getPeriodEnd(expected);
|
||||
|
||||
this.ourLog.info("Measure: %s, Patient: %s, Start: %s, End: %s", measureId, patientId, periodStart, periodEnd);
|
||||
|
||||
MeasureReport actual = this.myMeasureOperationsProvider.evaluateMeasure(new IdType("Measure", measureId),
|
||||
periodStart, periodEnd, null,
|
||||
// TODO: These are all individual reports
|
||||
"patient", patientId,
|
||||
// TODO: Generalize these parameters into a Parameters resource
|
||||
null, null, null, null, null, null);
|
||||
|
||||
compareMeasureReport(expected, actual);
|
||||
}
|
||||
|
||||
protected void compareMeasureReport(MeasureReport expected, MeasureReport actual) {
|
||||
assertNotNull("expected MeasureReport can not be null", expected);
|
||||
assertNotNull("actual MeasureReport can not be null", actual);
|
||||
|
||||
String errorLocator = String.format("Measure: %s, Subject: %s", expected.getMeasure(),
|
||||
expected.getSubject().getReference());
|
||||
|
||||
assertEquals(expected.hasGroup(), actual.hasGroup(), errorLocator);
|
||||
assertEquals(expected.getGroup().size(), actual.getGroup().size(), errorLocator);
|
||||
|
||||
for (MeasureReportGroupComponent mrgcExpected : expected.getGroup()) {
|
||||
Optional<MeasureReportGroupComponent> mrgcActualOptional = actual.getGroup().stream()
|
||||
.filter(x -> x.getId().equals(mrgcExpected.getId())).findFirst();
|
||||
|
||||
errorLocator = String.format("Measure: %s, Subject: %s, Group: %s", expected.getMeasure(),
|
||||
expected.getSubject().getReference(), mrgcExpected.getId());
|
||||
assertTrue(errorLocator, mrgcActualOptional.isPresent());
|
||||
|
||||
MeasureReportGroupComponent mrgcActual = mrgcActualOptional.get();
|
||||
|
||||
if (mrgcExpected.getMeasureScore() == null) {
|
||||
assertNull(mrgcActual.getMeasureScore(), errorLocator);
|
||||
} else {
|
||||
assertNotNull(mrgcActual.getMeasureScore());
|
||||
Quantity quantityExpected = mrgcExpected.getMeasureScore();
|
||||
Quantity quantityActual = mrgcActual.getMeasureScore();
|
||||
|
||||
assertThat(errorLocator, quantityActual.getValue(), Matchers.comparesEqualTo(quantityExpected.getValue()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: In R4 the Subject will not necessarily be a Patient.
|
||||
public String getPatientId(MeasureReport measureReport) {
|
||||
String[] subjectRefParts = measureReport.getSubject().getReference().split("/");
|
||||
String patientId = subjectRefParts[subjectRefParts.length - 1];
|
||||
return patientId;
|
||||
}
|
||||
|
||||
public String getMeasureId(MeasureReport measureReport) {
|
||||
String[] measureRefParts = measureReport.getMeasure().split("/");
|
||||
String measureId = measureRefParts[measureRefParts.length - 1];
|
||||
return measureId;
|
||||
}
|
||||
|
||||
public String getPeriodStart(MeasureReport measureReport) {
|
||||
Date periodStart = measureReport.getPeriod().getStart();
|
||||
if (periodStart != null) {
|
||||
return toDateString(periodStart);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public String getPeriodEnd(MeasureReport measureReport) {
|
||||
Date periodEnd = measureReport.getPeriod().getEnd();
|
||||
if (periodEnd != null) {
|
||||
return toDateString(periodEnd);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public String toDateString(Date date) {
|
||||
return new DateTimeType(date).getValueAsString();
|
||||
}
|
||||
|
||||
// @Test - No test results in this bundle yet
|
||||
// public void test_EXM74_102000() throws IOException {
|
||||
// this.testMeasureBundle("r4/connectathon/EXM74-10.2.000-bundle.json");
|
||||
// }
|
||||
|
||||
@Test
|
||||
public void test_EXM105_82000() throws IOException {
|
||||
this.testMeasureBundle("r4/connectathon/EXM105-8.2.000-bundle.json");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExm130PatientNumerator() throws IOException {
|
||||
loadBundles();
|
||||
MeasureReport report = myMeasureOperationsProvider.evaluateMeasure(measureId, periodStart, periodEnd, null, "patient",
|
||||
"numer-EXM130", null, null, null, null, null, null);
|
||||
// Assert it worked
|
||||
assertThat(report.getGroup(), hasSize(1));
|
||||
assertEquals(new BigDecimal("1.0"), report.getGroupFirstRep().getMeasureScore().getValue());
|
||||
public void test_EXM108_83000() throws IOException {
|
||||
this.testMeasureBundle("r4/connectathon/EXM108-8.3.000-bundle.json");
|
||||
}
|
||||
|
||||
// @Test - No test results in this bundle yet
|
||||
// public void test_EXM111_91000() throws IOException {
|
||||
// this.testMeasureBundle("r4/connectathon/EXM111-9.1.000-bundle.json");
|
||||
// }
|
||||
|
||||
// @Test - The test data for the denominator exclusion appears to be invalid
|
||||
// public void test_EXM124_82000() throws IOException {
|
||||
// this.testMeasureBundle("r4/connectathon/EXM124-8.2.000-bundle.json");
|
||||
// }
|
||||
|
||||
@Test
|
||||
public void test_EXM124_90000() throws IOException {
|
||||
this.testMeasureBundle("r4/connectathon/EXM124-9.0.000-bundle.json");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExm130PatientDenominator() throws IOException {
|
||||
loadBundles();
|
||||
MeasureReport report = myMeasureOperationsProvider.evaluateMeasure(measureId, periodStart, periodEnd, null, "patient",
|
||||
"denom-EXM130", null, null, null, null, null, null);
|
||||
// Assert it worked
|
||||
assertThat(report.getGroup(), hasSize(1));
|
||||
assertEquals(new BigDecimal("0.0"), report.getGroupFirstRep().getMeasureScore().getValue());
|
||||
public void test_EXM125_73000() throws IOException {
|
||||
this.testMeasureBundle("r4/connectathon/EXM125-7.3.000-bundle.json");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test_EXM130_73000() throws IOException {
|
||||
this.testMeasureBundle("r4/connectathon/EXM130-7.3.000-bundle.json");
|
||||
}
|
||||
|
||||
// @Test - No test results in this bundle yet
|
||||
// public void test_EXM149_92000() throws IOException {
|
||||
// this.testMeasureBundle("r4/connectathon/EXM149-9.2.000-bundle.json");
|
||||
// }
|
||||
|
||||
// @Test - Missing Adult outpatient encounters Library
|
||||
// public void test_EXM153_92000() throws IOException {
|
||||
// this.testMeasureBundle("r4/connectathon/EXM153-9.2.000-bundle.json");
|
||||
// }
|
||||
|
||||
// @Test - Missing Encounter data for Numerator test
|
||||
// public void test_EXM347_43000() throws IOException {
|
||||
// this.testMeasureBundle("r4/connectathon/EXM347-4.3.000-bundle.json");
|
||||
// }
|
||||
|
||||
// @Test - No test results in this bundle yet
|
||||
// public void test_EXM349_210000() throws IOException {
|
||||
// this.testMeasureBundle("r4/connectathon/EXM349-2.10.000-bundle.json");
|
||||
// }
|
||||
|
||||
// @Test - No test results in this bundle yet
|
||||
// public void test_EXM506_22000() throws IOException {
|
||||
// this.testMeasureBundle("r4/connectathon/EXM506-2.2.000-bundle.json");
|
||||
// }
|
||||
|
||||
// @Test - No test results in this bundle yet
|
||||
// public void test_EXM529_10000() throws IOException {
|
||||
// this.testMeasureBundle("r4/connectathon/EXM529-1.0.000-bundle.json");
|
||||
// }
|
||||
}
|
||||
|
|
File diff suppressed because one or more lines are too long (5 files)
|
@ -48,7 +48,6 @@ import javax.persistence.SequenceGenerator;
|
|||
import javax.persistence.Table;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.defaultString;
|
||||
import static org.apache.commons.lang3.StringUtils.left;
|
||||
|
||||
//@formatter:off
|
||||
@Embeddable
|
||||
|
@ -289,6 +288,7 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
|
|||
hashPrefixLength = 0;
|
||||
}
|
||||
|
||||
return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, left(theValueNormalized, hashPrefixLength));
|
||||
return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, StringUtil.left(theValueNormalized, hashPrefixLength));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
|||
import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import ca.uhn.fhir.util.SearchParameterUtil;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
|
@ -47,19 +48,19 @@ import org.springframework.beans.factory.annotation.Autowired;
|
|||
|
||||
import javax.annotation.PostConstruct;
|
||||
import javax.annotation.PreDestroy;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
|
||||
public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceChangeListener {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(SearchParamRegistryImpl.class);
|
||||
private static final int MAX_MANAGED_PARAM_COUNT = 10000;
|
||||
private static long REFRESH_INTERVAL = DateUtils.MILLIS_PER_HOUR;
|
||||
private static final long REFRESH_INTERVAL = DateUtils.MILLIS_PER_HOUR;
|
||||
|
||||
@Autowired
|
||||
private ModelConfig myModelConfig;
|
||||
|
@ -74,7 +75,7 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC
|
|||
|
||||
private volatile ReadOnlySearchParamCache myBuiltInSearchParams;
|
||||
private volatile IPhoneticEncoder myPhoneticEncoder;
|
||||
private volatile JpaSearchParamCache myJpaSearchParamCache = new JpaSearchParamCache();
|
||||
private final JpaSearchParamCache myJpaSearchParamCache = new JpaSearchParamCache();
|
||||
private volatile RuntimeSearchParamCache myActiveSearchParams;
|
||||
|
||||
@Autowired
|
||||
|
@ -282,7 +283,15 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC
|
|||
|
||||
@Override
|
||||
public void handleInit(Collection<IIdType> theResourceIds) {
|
||||
List<IBaseResource> searchParams = theResourceIds.stream().map(id -> mySearchParamProvider.read(id)).collect(Collectors.toList());
|
||||
List<IBaseResource> searchParams = new ArrayList<>();
|
||||
for (IIdType id : theResourceIds) {
|
||||
try {
|
||||
IBaseResource searchParam = mySearchParamProvider.read(id);
|
||||
searchParams.add(searchParam);
|
||||
} catch (ResourceNotFoundException e) {
|
||||
ourLog.warn("SearchParameter {} not found. Excluding from list of active search params.", id);
|
||||
}
|
||||
}
|
||||
initializeActiveSearchParams(searchParams);
|
||||
}
|
||||
|
||||
|
|
|
@ -17,8 +17,11 @@ import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
|||
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
|
||||
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
|
||||
import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.SearchParameter;
|
||||
|
@ -62,10 +65,10 @@ public class SearchParamRegistryImplTest {
|
|||
private static final ReadOnlySearchParamCache ourBuiltInSearchParams = ReadOnlySearchParamCache.fromFhirContext(ourFhirContext);
|
||||
|
||||
public static final int TEST_SEARCH_PARAMS = 3;
|
||||
private static List<ResourceTable> ourEntities;
|
||||
private static ResourceVersionMap ourResourceVersionMap;
|
||||
private static final List<ResourceTable> ourEntities;
|
||||
private static final ResourceVersionMap ourResourceVersionMap;
|
||||
private static int ourLastId;
|
||||
private static int ourBuiltinPatientSearchParamCount;
|
||||
private static final int ourBuiltinPatientSearchParamCount;
|
||||
|
||||
static {
|
||||
ourEntities = new ArrayList<>();
|
||||
|
@ -172,6 +175,24 @@ public class SearchParamRegistryImplTest {
|
|||
mySearchParamRegistry.resetForUnitTest();
|
||||
}
|
||||
|
||||
@Test
|
||||
void handleInit() {
|
||||
assertEquals(25, mySearchParamRegistry.getActiveSearchParams("Patient").size());
|
||||
|
||||
IdDt idBad = new IdDt("SearchParameter/bad");
|
||||
when(mySearchParamProvider.read(idBad)).thenThrow(new ResourceNotFoundException("id bad"));
|
||||
|
||||
IdDt idGood = new IdDt("SearchParameter/good");
|
||||
SearchParameter goodSearchParam = buildSearchParameter(Enumerations.PublicationStatus.ACTIVE);
|
||||
when(mySearchParamProvider.read(idGood)).thenReturn(goodSearchParam);
|
||||
|
||||
List<IIdType> idList = new ArrayList<>();
|
||||
idList.add(idBad);
|
||||
idList.add(idGood);
|
||||
mySearchParamRegistry.handleInit(idList);
|
||||
assertEquals(26, mySearchParamRegistry.getActiveSearchParams("Patient").size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRefreshAfterExpiry() {
|
||||
mySearchParamRegistry.requestRefresh();
|
||||
|
|
|
@ -66,7 +66,7 @@ public class TestR4Config extends BaseJavaConfigR4 {
|
|||
retVal.setWebsocketContextPath("/websocketR4");
|
||||
retVal.setAllowContainsSearches(true);
|
||||
retVal.setAllowMultipleDelete(true);
|
||||
retVal.setAllowInlineMatchUrlReferences(true);
|
||||
retVal.setAllowInlineMatchUrlReferences(false);
|
||||
retVal.setAllowExternalReferences(true);
|
||||
retVal.getTreatBaseUrlsAsLocal().add("http://hapi.fhir.org/baseR4");
|
||||
retVal.getTreatBaseUrlsAsLocal().add("https://hapi.fhir.org/baseR4");
|
||||
|
|
|
@ -22,7 +22,9 @@ package ca.uhn.fhir.mdm.rules.config;
|
|||
|
||||
import ca.uhn.fhir.context.ConfigurationException;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
||||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||
import ca.uhn.fhir.fhirpath.IFhirPath;
|
||||
import ca.uhn.fhir.mdm.api.MdmConstants;
|
||||
import ca.uhn.fhir.mdm.api.IMdmRuleValidator;
|
||||
import ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson;
|
||||
|
@ -33,7 +35,9 @@ import ca.uhn.fhir.mdm.rules.json.MdmSimilarityJson;
|
|||
import ca.uhn.fhir.parser.DataFormatException;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRetriever;
|
||||
import ca.uhn.fhir.util.FhirTerser;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
@ -51,16 +55,19 @@ public class MdmRuleValidator implements IMdmRuleValidator {
|
|||
|
||||
private final FhirContext myFhirContext;
|
||||
private final ISearchParamRetriever mySearchParamRetriever;
|
||||
private final Class<? extends IBaseResource> myPatientClass;
|
||||
private final Class<? extends IBaseResource> myPractitionerClass;
|
||||
private final FhirTerser myTerser;
|
||||
private final IFhirPath myFhirPath;
|
||||
|
||||
@Autowired
|
||||
public MdmRuleValidator(FhirContext theFhirContext, ISearchParamRetriever theSearchParamRetriever) {
|
||||
myFhirContext = theFhirContext;
|
||||
myPatientClass = theFhirContext.getResourceDefinition("Patient").getImplementingClass();
|
||||
myPractitionerClass = theFhirContext.getResourceDefinition("Practitioner").getImplementingClass();
|
||||
myTerser = myFhirContext.newTerser();
|
||||
if (myFhirContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) {
|
||||
myFhirPath = myFhirContext.newFhirPath();
|
||||
} else {
|
||||
ourLog.debug("Skipping FHIRPath validation as DSTU2 does not support FHIR");
|
||||
myFhirPath = null;
|
||||
}
|
||||
mySearchParamRetriever = theSearchParamRetriever;
|
||||
}
|
||||
|
||||
|
@ -158,20 +165,48 @@ public class MdmRuleValidator implements IMdmRuleValidator {
|
|||
}
|
||||
|
||||
private void validateFieldPathForType(String theResourceType, MdmFieldMatchJson theFieldMatch) {
|
||||
ourLog.debug(" validating resource {} for {} ", theResourceType, theFieldMatch.getResourcePath());
|
||||
ourLog.debug("Validating resource {} for {} ", theResourceType, theFieldMatch.getResourcePath());
|
||||
|
||||
try {
|
||||
if (theFieldMatch.getFhirPath() != null && theFieldMatch.getResourcePath() != null) {
|
||||
throw new ConfigurationException("MatchField [" +
|
||||
theFieldMatch.getName() +
|
||||
"] resourceType [" +
|
||||
theFieldMatch.getResourceType() +
|
||||
"] has defined both a resourcePath and a fhirPath. You must define one of the two.");
|
||||
}
|
||||
|
||||
if (theFieldMatch.getResourcePath() == null && theFieldMatch.getFhirPath() == null) {
|
||||
throw new ConfigurationException("MatchField [" +
|
||||
theFieldMatch.getName() +
|
||||
"] resourceType [" +
|
||||
theFieldMatch.getResourceType() +
|
||||
"] has defined neither a resourcePath or a fhirPath. You must define one of the two.");
|
||||
}
|
||||
|
||||
if (theFieldMatch.getResourcePath() != null) {
|
||||
try { //Try to validate the struture definition path
|
||||
RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(theResourceType);
|
||||
Class<? extends IBaseResource> implementingClass = resourceDefinition.getImplementingClass();
|
||||
String path = theResourceType + "." + theFieldMatch.getResourcePath();
|
||||
myTerser.getDefinition(implementingClass, path);
|
||||
} catch (DataFormatException | ConfigurationException | ClassCastException e) {
|
||||
//Fallback to attempting to FHIRPath evaluate it.
|
||||
throw new ConfigurationException("MatchField " +
|
||||
theFieldMatch.getName() +
|
||||
" resourceType " +
|
||||
theFieldMatch.getResourceType() +
|
||||
" has invalid path '" + theFieldMatch.getResourcePath() + "'. " +
|
||||
e.getMessage());
|
||||
" has invalid path '" + theFieldMatch.getResourcePath() + "'. " + e.getMessage());
|
||||
}
|
||||
} else { //Try to validate the FHIRPath
|
||||
try {
|
||||
if (myFhirPath != null) {
|
||||
myFhirPath.parse(theResourceType + "." + theFieldMatch.getFhirPath());
|
||||
} else {
|
||||
ourLog.debug("Can't validate FHIRPath expression due to a lack of IFhirPath object.");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new ConfigurationException("MatchField [" + theFieldMatch.getName() + "] resourceType [" + theFieldMatch.getResourceType() + "] has failed FHIRPath evaluation. " + e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -44,9 +44,12 @@ public class MdmFieldMatchJson implements IModelJson {
|
|||
@JsonProperty(value = "resourceType", required = true)
|
||||
String myResourceType;
|
||||
|
||||
@JsonProperty(value = "resourcePath", required = true)
|
||||
@JsonProperty(value = "resourcePath", required = false)
|
||||
String myResourcePath;
|
||||
|
||||
@JsonProperty(value = "fhirPath", required = false)
|
||||
String myFhirPath;
|
||||
|
||||
@JsonProperty(value = "matcher", required = false)
|
||||
MdmMatcherJson myMatcher;
|
||||
|
||||
|
@ -112,4 +115,13 @@ public class MdmFieldMatchJson implements IModelJson {
|
|||
}
|
||||
throw new InternalErrorException("Field Match " + myName + " has neither a matcher nor a similarity.");
|
||||
}
|
||||
|
||||
public String getFhirPath() {
|
||||
return myFhirPath;
|
||||
}
|
||||
|
||||
public MdmFieldMatchJson setFhirPath(String theFhirPath) {
|
||||
myFhirPath = theFhirPath;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@ package ca.uhn.fhir.mdm.rules.svc;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.fhirpath.IFhirPath;
|
||||
import ca.uhn.fhir.mdm.api.MdmMatchEvaluation;
|
||||
import ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson;
|
||||
import ca.uhn.fhir.mdm.rules.json.MdmRulesJson;
|
||||
|
@ -43,16 +44,20 @@ public class MdmResourceFieldMatcher {
|
|||
private final MdmFieldMatchJson myMdmFieldMatchJson;
|
||||
private final String myResourceType;
|
||||
private final String myResourcePath;
|
||||
private final String myFhirPath;
|
||||
private final MdmRulesJson myMdmRulesJson;
|
||||
private final String myName;
|
||||
private final boolean myIsFhirPathExpression;
|
||||
|
||||
public MdmResourceFieldMatcher(FhirContext theFhirContext, MdmFieldMatchJson theMdmFieldMatchJson, MdmRulesJson theMdmRulesJson) {
|
||||
myFhirContext = theFhirContext;
|
||||
myMdmFieldMatchJson = theMdmFieldMatchJson;
|
||||
myResourceType = theMdmFieldMatchJson.getResourceType();
|
||||
myResourcePath = theMdmFieldMatchJson.getResourcePath();
|
||||
myFhirPath = theMdmFieldMatchJson.getFhirPath();
|
||||
myName = theMdmFieldMatchJson.getName();
|
||||
myMdmRulesJson = theMdmRulesJson;
|
||||
myIsFhirPathExpression = myFhirPath != null;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -71,9 +76,18 @@ public class MdmResourceFieldMatcher {
|
|||
validate(theLeftResource);
|
||||
validate(theRightResource);
|
||||
|
||||
FhirTerser terser = myFhirContext.newTerser();
|
||||
List<IBase> leftValues = terser.getValues(theLeftResource, myResourcePath, IBase.class);
|
||||
List<IBase> rightValues = terser.getValues(theRightResource, myResourcePath, IBase.class);
|
||||
List<IBase> leftValues;
|
||||
List<IBase> rightValues;
|
||||
|
||||
if (myIsFhirPathExpression) {
|
||||
IFhirPath fhirPath = myFhirContext.newFhirPath();
|
||||
leftValues = fhirPath.evaluate(theLeftResource, myFhirPath, IBase.class);
|
||||
rightValues = fhirPath.evaluate(theRightResource, myFhirPath, IBase.class);
|
||||
} else {
|
||||
FhirTerser fhirTerser = myFhirContext.newTerser();
|
||||
leftValues = fhirTerser.getValues(theLeftResource, myResourcePath, IBase.class);
|
||||
rightValues = fhirTerser.getValues(theRightResource, myResourcePath, IBase.class);
|
||||
}
|
||||
return match(leftValues, rightValues);
|
||||
}
|
||||
|
||||
|
|
|
@ -144,6 +144,7 @@ public class MdmResourceMatcherSvc {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
|
||||
private boolean isValidResourceType(String theResourceType, String theFieldComparatorType) {
|
||||
return (
|
||||
theFieldComparatorType.equalsIgnoreCase(MdmConstants.ALL_RESOURCE_SEARCH_PARAM_TYPE)
|
||||
|
|
|
@ -71,6 +71,36 @@ public class MdmRuleValidatorTest extends BaseR4Test {
}
}

@Test
public void testMatcherBadFhirPath() throws IOException {
try {
setMdmRuleJson("bad-rules-bad-fhirpath.json");
fail();
} catch (ConfigurationException e) {
assertThat(e.getMessage(), startsWith("MatchField [given-name] resourceType [Patient] has failed FHIRPath evaluation. Error in ?? at 1, 1: The name blurst is not a valid function name"));
}
}

@Test
public void testBadRulesMissingBothPaths() throws IOException {
try {
setMdmRuleJson("bad-rules-no-path.json");
fail();
} catch (ConfigurationException e) {
assertThat(e.getMessage(), startsWith("MatchField [given-name] resourceType [Patient] has defined neither a resourcePath or a fhirPath. You must define one of the two."));
}
}

@Test
public void testBadRulesBothPathsFilled() throws IOException {
try {
setMdmRuleJson("bad-rules-both-paths.json");
fail();
} catch (ConfigurationException e) {
assertThat(e.getMessage(), startsWith("MatchField [given-name] resourceType [Patient] has defined both a resourcePath and a fhirPath. You must define one of the two."));
}
}

@Test
public void testMatcherBadSearchParam() throws IOException {
try {

@ -16,6 +16,7 @@ import java.util.Arrays;

public abstract class BaseMdmRulesR4Test extends BaseR4Test {
public static final String PATIENT_GIVEN = "patient-given";
public static final String PATIENT_GIVEN_FIRST = "patient-given-first";
public static final String PATIENT_FAMILY = "patient-last";

public static final double NAME_THRESHOLD = 0.8;

@ -36,6 +37,7 @@ public abstract class BaseMdmRulesR4Test extends BaseR4Test {
.setResourceType("Patient")
.setResourcePath("name.given")
.setSimilarity(new MdmSimilarityJson().setAlgorithm(MdmSimilarityEnum.COSINE).setMatchThreshold(NAME_THRESHOLD));

myBothNameFields = String.join(",", PATIENT_GIVEN, PATIENT_FAMILY);
}

@ -0,0 +1,116 @@
package ca.uhn.fhir.mdm.rules.svc;

import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.mdm.api.MdmMatchOutcome;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson;
import ca.uhn.fhir.mdm.rules.json.MdmMatcherJson;
import ca.uhn.fhir.mdm.rules.json.MdmRulesJson;
import ca.uhn.fhir.mdm.rules.matcher.MdmMatcherEnum;
import org.hl7.fhir.r4.model.HumanName;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.Arrays;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class FhirPathResourceMatcherR4Test extends BaseMdmRulesR4Test {
private static final String MATCH_FIELDS = PATIENT_GIVEN_FIRST + "," + PATIENT_GIVEN;
private Patient myLeft;
private Patient myRight;

@Override
@BeforeEach
public void before() {
super.before();
when(mySearchParamRetriever.getActiveSearchParam("Patient", "birthdate")).thenReturn(mock(RuntimeSearchParam.class));
when(mySearchParamRetriever.getActiveSearchParam("Patient", "identifier")).thenReturn(mock(RuntimeSearchParam.class));
when(mySearchParamRetriever.getActiveSearchParam("Patient", "active")).thenReturn(mock(RuntimeSearchParam.class));

{
myLeft = new Patient();
HumanName name = myLeft.addName();
name.addGiven("Gary");
name.addGiven("John");
myLeft.setId("Patient/1");
}
{
myRight = new Patient();
HumanName name = myRight.addName();
name.addGiven("John");
name.addGiven("Gary");
myRight.setId("Patient/2");
}
}

@Test
public void testFhirPathOrderedMatches() {
MdmResourceMatcherSvc matcherSvc = buildMatcher(buildOrderedGivenNameRules(MdmMatcherEnum.STRING));

myLeft = new Patient();
HumanName name = myLeft.addName();
name.addGiven("Gary");
name.addGiven("John");
myLeft.setId("Patient/1");

myRight = new Patient();
HumanName name2 = myRight.addName();
name2.addGiven("John");
name2.addGiven("Gary");
myRight.setId("Patient/2");

MdmMatchOutcome result = matcherSvc.match(myLeft, myRight);
assertMatchResult(MdmMatchResultEnum.NO_MATCH, 0L, 0.0, false, false, result);

myRight = new Patient();
name = myRight.addName();
name.addGiven("John");
name.addGiven("Gary");
myRight.setId("Patient/2");

myLeft = new Patient();
name2 = myLeft.addName();
name2.addGiven("Frank");
name2.addGiven("Gary");
myLeft.setId("Patient/1");

result = matcherSvc.match(myLeft, myRight);
assertMatchResult(MdmMatchResultEnum.POSSIBLE_MATCH, 1L, 1.0, false, false, result);

}

@Test
public void testStringMatchResult() {
MdmResourceMatcherSvc matcherSvc = buildMatcher(buildOrderedGivenNameRules(MdmMatcherEnum.STRING));
MdmMatchOutcome result = matcherSvc.match(myLeft, myRight);
assertMatchResult(MdmMatchResultEnum.NO_MATCH, 0L, 0.0, false, false, result);
}

protected MdmRulesJson buildOrderedGivenNameRules(MdmMatcherEnum theMatcherEnum) {
MdmFieldMatchJson firstGivenNameMatchField = new MdmFieldMatchJson()
.setName(PATIENT_GIVEN_FIRST)
.setResourceType("Patient")
.setFhirPath("name.given.first()")
.setMatcher(new MdmMatcherJson().setAlgorithm(theMatcherEnum));

MdmFieldMatchJson secondGivenNameMatchField = new MdmFieldMatchJson()
.setName(PATIENT_GIVEN)
.setResourceType("Patient")
.setFhirPath("name.given[1]")
.setMatcher(new MdmMatcherJson().setAlgorithm(theMatcherEnum));

MdmRulesJson retval = new MdmRulesJson();
retval.setVersion("test version");
retval.addMatchField(secondGivenNameMatchField);
retval.addMatchField(firstGivenNameMatchField);
retval.setMdmTypes(Arrays.asList("Patient"));
retval.putMatchResult(MATCH_FIELDS, MdmMatchResultEnum.MATCH);
retval.putMatchResult(PATIENT_GIVEN_FIRST, MdmMatchResultEnum.POSSIBLE_MATCH);
retval.putMatchResult(PATIENT_GIVEN, MdmMatchResultEnum.POSSIBLE_MATCH);
return retval;
}
}

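The two rules built above differ only in which given name they read, which is what drives the NO_MATCH and POSSIBLE_MATCH expectations in testFhirPathOrderedMatches. A small sketch of how those expressions evaluate against the Gary/John and John/Gary patients, assuming HAPI FHIR's IFhirPath API (the evaluateFirst usage is illustrative, not lifted from the test):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.fhirpath.IFhirPath;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;

public class GivenNameOrderSketch {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IFhirPath fhirPath = ctx.newFhirPath();

		Patient left = new Patient();
		left.addName().addGiven("Gary").addGiven("John");

		Patient right = new Patient();
		right.addName().addGiven("John").addGiven("Gary");

		// patient-given-first compares name.given.first(): "Gary" vs "John"
		String leftFirst = fhirPath.evaluateFirst(left, "name.given.first()", StringType.class).map(StringType::getValue).orElse(null);
		String rightFirst = fhirPath.evaluateFirst(right, "name.given.first()", StringType.class).map(StringType::getValue).orElse(null);

		// patient-given compares name.given[1]: "John" vs "Gary"
		String leftSecond = fhirPath.evaluateFirst(left, "name.given[1]", StringType.class).map(StringType::getValue).orElse(null);
		String rightSecond = fhirPath.evaluateFirst(right, "name.given[1]", StringType.class).map(StringType::getValue).orElse(null);

		System.out.println(leftFirst + " vs " + rightFirst);   // Gary vs John
		System.out.println(leftSecond + " vs " + rightSecond); // John vs Gary
	}
}

Neither position agrees for the first pair, so no field matches and the outcome is NO_MATCH; once the second given name agrees ("Gary" in the Frank/Gary case), the patient-given field alone matches and the match-result map resolves that to POSSIBLE_MATCH.
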
@ -30,7 +30,6 @@ public class MdmResourceFieldMatcherR4Test extends BaseMdmRulesR4Test {
@BeforeEach
public void before() {
super.before();

myComparator = new MdmResourceFieldMatcher(ourFhirContext, myGivenNameMatchField, myMdmRulesJson);
myJohn = buildJohn();
myJohny = buildJohny();

@ -91,22 +90,6 @@ public class MdmResourceFieldMatcherR4Test extends BaseMdmRulesR4Test {
}
}

@Test
public void testBadPath() {
try {
MdmFieldMatchJson matchField = new MdmFieldMatchJson()
.setName("patient-foo")
.setResourceType("Patient")
.setResourcePath("foo")
.setSimilarity(new MdmSimilarityJson().setAlgorithm(MdmSimilarityEnum.COSINE).setMatchThreshold(NAME_THRESHOLD));
MdmResourceFieldMatcher comparator = new MdmResourceFieldMatcher(ourFhirContext, matchField, myMdmRulesJson);
comparator.match(myJohn, myJohny);
fail();
} catch (DataFormatException e) {
assertThat(e.getMessage(), startsWith("Unknown child name 'foo' in element Patient"));
}
}

@Test
public void testMatch() {
assertTrue(myComparator.match(myJohn, myJohny).match);

@ -7,12 +7,15 @@ import ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson;
import ca.uhn.fhir.mdm.rules.json.MdmMatcherJson;
import ca.uhn.fhir.mdm.rules.json.MdmRulesJson;
import ca.uhn.fhir.mdm.rules.matcher.MdmMatcherEnum;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.r4.model.HumanName;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@ -0,0 +1,18 @@
{
"version": "1",
"mdmTypes": ["Patient", "Practitioner", "Medication"],
"candidateSearchParams" : [],
"candidateFilterSearchParams" : [],
"matchFields" : [ {
"name" : "given-name",
"resourceType" : "Patient",
"fhirPath" : "name.given.blurst()",
"matcher" : {
"algorithm": "STRING",
"exact" : true
}
}],
"matchResultMap" : {
"given-name" : "POSSIBLE_MATCH"
}
}

@ -0,0 +1,19 @@
{
"version": "1",
"mdmTypes": ["Patient", "Practitioner", "Medication"],
"candidateSearchParams" : [],
"candidateFilterSearchParams" : [],
"matchFields" : [ {
"name" : "given-name",
"resourceType" : "Patient",
"resourcePath" : "name.first",
"fhirPath" : "name.given.first()",
"matcher" : {
"algorithm": "STRING",
"exact" : true
}
}],
"matchResultMap" : {
"given-name" : "POSSIBLE_MATCH"
}
}

@ -0,0 +1,17 @@
{
"version": "1",
"mdmTypes": ["Patient", "Practitioner", "Medication"],
"candidateSearchParams" : [],
"candidateFilterSearchParams" : [],
"matchFields" : [ {
"name" : "given-name",
"resourceType" : "Patient",
"matcher" : {
"algorithm": "STRING",
"exact" : true
}
}],
"matchResultMap" : {
"given-name" : "POSSIBLE_MATCH"
}
}

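The three new fixtures above cover the rule-validation failure modes the tests expect: a fhirPath that does not parse, both resourcePath and fhirPath present, and neither present. The check amounts to an exclusive-or over the two fields plus a trial FHIRPath evaluation; a rough sketch of that logic, using a hypothetical helper rather than the actual MdmRuleValidator API (error strings reuse the wording the tests assert on):

import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.r4.model.Patient;

public class MatchFieldPathCheckSketch {

	// Hypothetical helper, not the real MdmRuleValidator: it only shows the shape of the check.
	static void validatePaths(String theName, String theResourcePath, String theFhirPath, FhirContext theContext) {
		boolean hasResourcePath = theResourcePath != null;
		boolean hasFhirPath = theFhirPath != null;

		if (hasResourcePath && hasFhirPath) {
			throw new ConfigurationException("MatchField [" + theName + "] has defined both a resourcePath and a fhirPath. You must define one of the two.");
		}
		if (!hasResourcePath && !hasFhirPath) {
			throw new ConfigurationException("MatchField [" + theName + "] has defined neither a resourcePath or a fhirPath. You must define one of the two.");
		}
		if (hasFhirPath) {
			try {
				// Trial evaluation against an empty resource surfaces bad expressions like "name.given.blurst()" at startup.
				theContext.newFhirPath().evaluate(new Patient(), theFhirPath, IBase.class);
			} catch (Exception e) {
				throw new ConfigurationException("MatchField [" + theName + "] has failed FHIRPath evaluation. " + e.getMessage());
			}
		}
	}

	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		validatePaths("given-name", null, "name.given.first()", ctx); // passes
		try {
			validatePaths("given-name", "name.first", "name.given.first()", ctx); // both paths set
		} catch (ConfigurationException e) {
			System.out.println(e.getMessage());
		}
	}
}
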
@ -144,7 +144,7 @@ public interface ITestDataBuilder {
return createResource("Organization", theModifiers);
}

default IIdType createResource(String theResourceType, Consumer<IBaseResource>[] theModifiers) {
default IIdType createResource(String theResourceType, Consumer<IBaseResource>... theModifiers) {
IBaseResource resource = buildResource(theResourceType, theModifiers);

if (isNotBlank(resource.getIdElement().getValue())) {

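The ITestDataBuilder change above is purely a signature fix: taking Consumer<IBaseResource>... instead of an explicit array lets call sites pass zero or more modifier lambdas directly. A tiny hypothetical stand-in showing the call-site difference (not the real builder implementation):

import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Organization;

import java.util.function.Consumer;

public class VarargsModifierSketch {

	// Hypothetical stand-in for the method in this hunk; the real builder resolves theResourceType properly.
	@SafeVarargs
	static IBaseResource buildResource(String theResourceType, Consumer<IBaseResource>... theModifiers) {
		IBaseResource resource = new Organization();
		for (Consumer<IBaseResource> modifier : theModifiers) {
			modifier.accept(resource);
		}
		return resource;
	}

	public static void main(String[] args) {
		// With Consumer<IBaseResource>[] the caller had to build an array; with varargs the lambdas go straight in.
		IBaseResource org = buildResource("Organization", t -> ((Organization) t).setName("Acme"), t -> ((Organization) t).setActive(true));
		System.out.println(((Organization) org).getName()); // Acme
	}
}
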
@ -16389,7 +16389,7 @@
<severity value="error"></severity>
<human value="Only 'choice' and 'open-choice' items can have answerValueSet"></human>
<expression value="(type ='choice' or type = 'open-choice' or type = 'decimal' or type = 'integer' or type = 'date' or type = 'dateTime' or type = 'time' or type = 'string' or type = 'quantity') or (answerValueSet.empty() and answerOption.empty())"></expression>
<xpath value="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity',') or not(f:answerOption or f:answerValueSet)"></xpath>
<xpath value="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity') or not(f:answerOption or f:answerValueSet)"></xpath>
</constraint>
<constraint>
<key value="que-6"></key>

@ -285286,7 +285286,7 @@
<severity value="error"></severity>
<human value="Only 'choice' and 'open-choice' items can have answerValueSet"></human>
<expression value="(type ='choice' or type = 'open-choice' or type = 'decimal' or type = 'integer' or type = 'date' or type = 'dateTime' or type = 'time' or type = 'string' or type = 'quantity') or (answerValueSet.empty() and answerOption.empty())"></expression>
<xpath value="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity',') or not(f:answerOption or f:answerValueSet)"></xpath>
<xpath value="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity') or not(f:answerOption or f:answerValueSet)"></xpath>
</constraint>
<constraint>
<key value="que-6"></key>

@ -287200,7 +287200,7 @@
<severity value="error"></severity>
<human value="Only 'choice' and 'open-choice' items can have answerValueSet"></human>
<expression value="(type ='choice' or type = 'open-choice' or type = 'decimal' or type = 'integer' or type = 'date' or type = 'dateTime' or type = 'time' or type = 'string' or type = 'quantity') or (answerValueSet.empty() and answerOption.empty())"></expression>
<xpath value="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity',') or not(f:answerOption or f:answerValueSet)"></xpath>
<xpath value="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity') or not(f:answerOption or f:answerValueSet)"></xpath>
</constraint>
<constraint>
<key value="que-4"></key>

@ -35,7 +35,7 @@
<sch:assert test="not(f:type/@value=('group', 'display') and f:*[starts-with(local-name(.), 'initial')])">Read-only can't be specified for "display" items (inherited)</sch:assert>
<sch:assert test="not(f:type/@value=('group', 'display') and f:*[starts-with(local-name(.), 'initial')])">Initial values can't be specified for groups or display items (inherited)</sch:assert>
<sch:assert test="not(f:type/@value='display' and (f:required or f:repeats))">Required and repeat aren't permitted for display items (inherited)</sch:assert>
<sch:assert test="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity',') or not(f:answerOption or f:answerValueSet)">Only 'choice' and 'open-choice' items can have answerValueSet (inherited)</sch:assert>
<sch:assert test="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity') or not(f:answerOption or f:answerValueSet)">Only 'choice' and 'open-choice' items can have answerValueSet (inherited)</sch:assert>
<sch:assert test="not(f:answerValueSet and f:answerOption)">A question cannot have both answerOption and answerValueSet (inherited)</sch:assert>
<sch:assert test="not(f:type/@value='display' and f:code)">Display items cannot have a "code" asserted (inherited)</sch:assert>
<sch:assert test="f:type/@value=('boolean', 'decimal', 'integer', 'open-choice', 'string', 'text', 'url') or not(f:maxLength)">Maximum length can only be declared for simple question types (inherited)</sch:assert>

@ -41,7 +41,7 @@
<sch:assert test="not(f:type/@value=('group', 'display') and f:*[starts-with(local-name(.), 'initial')])">Read-only can't be specified for "display" items (inherited)</sch:assert>
<sch:assert test="not(f:type/@value=('group', 'display') and f:*[starts-with(local-name(.), 'initial')])">Initial values can't be specified for groups or display items (inherited)</sch:assert>
<sch:assert test="not(f:type/@value='display' and (f:required or f:repeats))">Required and repeat aren't permitted for display items (inherited)</sch:assert>
<sch:assert test="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity',') or not(f:answerOption or f:answerValueSet)">Only 'choice' items can have answerValueSet (inherited)</sch:assert>
<sch:assert test="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity') or not(f:answerOption or f:answerValueSet)">Only 'choice' items can have answerValueSet (inherited)</sch:assert>
<sch:assert test="not(f:answerValueSet and f:answerOption)">A question cannot have both answerOption and answerValueSet (inherited)</sch:assert>
<sch:assert test="not(f:type/@value='display' and f:code)">Display items cannot have a "code" asserted (inherited)</sch:assert>
<sch:assert test="f:type/@value=('boolean', 'decimal', 'integer', 'open-choice', 'string', 'text', 'url') or not(f:maxLength)">Maximum length can only be declared for simple question types (inherited)</sch:assert>

@ -10667,7 +10667,7 @@
<sch:assert test="not(f:type/@value=('group', 'display') and f:*[starts-with(local-name(.), 'initial')])">que-9: Read-only can't be specified for "display" items</sch:assert>
<sch:assert test="not(f:type/@value=('group', 'display') and f:*[starts-with(local-name(.), 'initial')])">que-8: Initial values can't be specified for groups or display items</sch:assert>
<sch:assert test="not(f:type/@value='display' and (f:required or f:repeats))">que-6: Required and repeat aren't permitted for display items</sch:assert>
<sch:assert test="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity',') or not(f:answerOption or f:answerValueSet)">que-5: Only 'choice' and 'open-choice' items can have answerValueSet</sch:assert>
<sch:assert test="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity') or not(f:answerOption or f:answerValueSet)">que-5: Only 'choice' and 'open-choice' items can have answerValueSet</sch:assert>
<sch:assert test="not(f:answerValueSet and f:answerOption)">que-4: A question cannot have both answerOption and answerValueSet</sch:assert>
<sch:assert test="not(f:type/@value='display' and f:code)">que-3: Display items cannot have a "code" asserted</sch:assert>
<sch:assert test="f:type/@value=('boolean', 'decimal', 'integer', 'open-choice', 'string', 'text', 'url') or not(f:maxLength)">que-10: Maximum length can only be declared for simple question types</sch:assert>

@ -78,7 +78,7 @@
<sch:assert test="not(f:type/@value=('group', 'display') and f:*[starts-with(local-name(.), 'initial')])">que-9: Read-only can't be specified for "display" items</sch:assert>
<sch:assert test="not(f:type/@value=('group', 'display') and f:*[starts-with(local-name(.), 'initial')])">que-8: Initial values can't be specified for groups or display items</sch:assert>
<sch:assert test="not(f:type/@value='display' and (f:required or f:repeats))">que-6: Required and repeat aren't permitted for display items</sch:assert>
<sch:assert test="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity',') or not(f:answerOption or f:answerValueSet)">que-5: Only 'choice' and 'open-choice' items can have answerValueSet</sch:assert>
<sch:assert test="f:type/@value=('choice','open-choice','decimal','integer','date','dateTime','time','string','quantity') or not(f:answerOption or f:answerValueSet)">que-5: Only 'choice' and 'open-choice' items can have answerValueSet</sch:assert>
<sch:assert test="not(f:answerValueSet and f:answerOption)">que-4: A question cannot have both answerOption and answerValueSet</sch:assert>
<sch:assert test="not(f:type/@value='display' and f:code)">que-3: Display items cannot have a "code" asserted</sch:assert>
<sch:assert test="f:type/@value=('boolean', 'decimal', 'integer', 'open-choice', 'string', 'text', 'url') or not(f:maxLength)">que-10: Maximum length can only be declared for simple question types</sch:assert>
