Merge remote-tracking branch 'origin/master' into do-20231213-core-bump-6-2-6
commit 0a91f88934
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -209,7 +209,7 @@ public class GenericClientDstu3IT {
 	@Test
 	public void testClientFailures() {
 		ResponseBody body = mock(ResponseBody.class);
-		when(body.source()).thenThrow(IllegalStateException.class, RuntimeException.class);
+		when(body.byteStream()).thenThrow(IllegalStateException.class, RuntimeException.class);

 		myHttpResponse = new Response.Builder()
			.request(myRequest)
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -105,6 +105,10 @@ public class RequestPartitionId implements IModelJson {
 		return myAllPartitions;
 	}

+	public boolean isPartitionCovered(Integer thePartitionId) {
+		return isAllPartitions() || getPartitionIds().contains(thePartitionId);
+	}
+
 	@Nullable
 	public LocalDate getPartitionDate() {
 		return myPartitionDate;
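The new `isPartitionCovered()` helper treats the all-partitions case as covering every ID, and otherwise checks list membership. A minimal sketch of that contract, assuming the existing `allPartitions()` and `fromPartitionId()` factories on `RequestPartitionId`:

```java
// Sketch only (not part of the diff); factory names assumed from existing HAPI API.
RequestPartitionId all = RequestPartitionId.allPartitions();
RequestPartitionId one = RequestPartitionId.fromPartitionId(1);

assert all.isPartitionCovered(42);   // all-partitions covers any partition ID
assert one.isPartitionCovered(1);    // ID present in the partition list
assert !one.isPartitionCovered(2);   // ID absent from the list
```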
@@ -0,0 +1,70 @@
+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.model.api;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.commons.lang3.Validate;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+public abstract class BaseBatchJobParameters implements IModelJson {
+	/**
+	 * A serializable map of key-value pairs that can be
+	 * added to any extending job.
+	 */
+	@JsonProperty("userData")
+	private Map<String, Object> myUserData;
+
+	public Map<String, Object> getUserData() {
+		if (myUserData == null) {
+			myUserData = new HashMap<>();
+		}
+		return myUserData;
+	}
+
+	public void setUserData(String theKey, Object theValue) {
+		Validate.isTrue(isNotBlank(theKey), "Invalid key; key must be non-empty, non-null.");
+		if (theValue == null) {
+			getUserData().remove(theKey);
+		} else {
+			Validate.isTrue(
+					validateValue(theValue),
+					String.format(
+							"Invalid data type provided %s", theValue.getClass().getName()));
+			getUserData().put(theKey, theValue);
+		}
+	}
+
+	private boolean validateValue(Object theValue) {
+		if (theValue instanceof Boolean) {
+			return true;
+		}
+		if (theValue instanceof Number) {
+			return true;
+		}
+		if (theValue instanceof String) {
+			return true;
+		}
+		return false;
+	}
+}
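The new base class only accepts JSON-serializable scalar values in its user-data map. A hypothetical subclass (not part of the diff) showing the accepted and rejected types:

```java
// MyJobParameters is a made-up name for illustration.
class MyJobParameters extends BaseBatchJobParameters {}

MyJobParameters params = new MyJobParameters();
params.setUserData("dryRun", true);      // Boolean: accepted
params.setUserData("retryCount", 3);     // Number: accepted
params.setUserData("label", "nightly");  // String: accepted
params.setUserData("label", null);       // null removes the entry again
params.setUserData("bad", new Object()); // throws IllegalArgumentException
```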
@@ -43,6 +43,7 @@ import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.util.BundleUtil;
 import ca.uhn.fhir.util.FhirTerser;
+import ca.uhn.fhir.util.MetaUtil;
 import ca.uhn.fhir.util.UrlUtil;
 import com.google.common.base.Charsets;
 import jakarta.annotation.Nullable;
@@ -217,7 +218,8 @@ public abstract class BaseParser implements IParser {
 		});
 	}

-	private String determineReferenceText(IBaseReference theRef, CompositeChildElement theCompositeChildElement) {
+	private String determineReferenceText(
+			IBaseReference theRef, CompositeChildElement theCompositeChildElement, IBaseResource theResource) {
 		IIdType ref = theRef.getReferenceElement();
 		if (isBlank(ref.getIdPart())) {
 			String reference = ref.getValue();
@@ -241,7 +243,7 @@ public abstract class BaseParser implements IParser {
 						.getResourceDefinition(theRef.getResource())
 						.getName());
 			}
-			if (isStripVersionsFromReferences(theCompositeChildElement)) {
+			if (isStripVersionsFromReferences(theCompositeChildElement, theResource)) {
 				reference = refId.toVersionless().getValue();
 			} else {
 				reference = refId.getValue();
@@ -258,12 +260,12 @@ public abstract class BaseParser implements IParser {
 					myContext.getResourceDefinition(theRef.getResource()).getName());
 		}
 		if (isNotBlank(myServerBaseUrl) && StringUtils.equals(myServerBaseUrl, ref.getBaseUrl())) {
-			if (isStripVersionsFromReferences(theCompositeChildElement)) {
+			if (isStripVersionsFromReferences(theCompositeChildElement, theResource)) {
 				return ref.toUnqualifiedVersionless().getValue();
 			}
 			return ref.toUnqualified().getValue();
 		}
-		if (isStripVersionsFromReferences(theCompositeChildElement)) {
+		if (isStripVersionsFromReferences(theCompositeChildElement, theResource)) {
 			return ref.toVersionless().getValue();
 		}
 		return ref.getValue();
@@ -604,7 +606,17 @@ public abstract class BaseParser implements IParser {
 		return myContext.getParserOptions().isOverrideResourceIdWithBundleEntryFullUrl();
 	}

-	private boolean isStripVersionsFromReferences(CompositeChildElement theCompositeChildElement) {
+	private boolean isStripVersionsFromReferences(
+			CompositeChildElement theCompositeChildElement, IBaseResource theResource) {
+
+		Set<String> autoVersionReferencesAtPathExtensions =
+				MetaUtil.getAutoVersionReferencesAtPath(theResource.getMeta(), myContext.getResourceType(theResource));
+
+		if (!autoVersionReferencesAtPathExtensions.isEmpty()
+				&& theCompositeChildElement.anyPathMatches(autoVersionReferencesAtPathExtensions)) {
+			return false;
+		}
+
 		Boolean stripVersionsFromReferences = myStripVersionsFromReferences;
 		if (stripVersionsFromReferences != null) {
 			return stripVersionsFromReferences;
@@ -811,7 +823,7 @@ public abstract class BaseParser implements IParser {
 		 */
 		if (next instanceof IBaseReference) {
 			IBaseReference nextRef = (IBaseReference) next;
-			String refText = determineReferenceText(nextRef, theCompositeChildElement);
+			String refText = determineReferenceText(nextRef, theCompositeChildElement, theResource);
 			if (!StringUtils.equals(refText, nextRef.getReferenceElement().getValue())) {

 				if (retVal == theValues) {
@@ -219,6 +219,7 @@ public class Constants {
 	public static final String PARAM_TAGS = "_tags";
 	public static final String PARAM_TEXT = "_text";
 	public static final String PARAM_VALIDATE = "_validate";
+	public static final String PARAM_MDM = "_mdm";

 	public static final String PARAMQUALIFIER_MISSING = ":missing";
 	public static final String PARAMQUALIFIER_MISSING_FALSE = "false";
@@ -105,7 +105,7 @@ public final class HapiSystemProperties {
 	}

 	/**
-	 * This property is used to ensure unit test behaviour is deterministic. It is also used to add extra logging for unit tests.
+	 * This property is used to ensure unit test behaviour is deterministic.
 	 */
 	public static void enableUnitTestMode() {
 		System.setProperty(UNIT_TEST_MODE, Boolean.TRUE.toString());
@@ -30,6 +30,7 @@ import org.hl7.fhir.instance.model.api.IBaseExtension;
 import org.hl7.fhir.instance.model.api.IBaseHasExtensions;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;

+import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
 import java.util.function.Predicate;
@@ -177,15 +178,18 @@ public class ExtensionUtil {
 	 * pulls out any extensions that have the given theExtensionUrl and a primitive value type,
 	 * and returns a list of the string version of the extension values.
 	 */
-	public static List<String> getExtensionPrimitiveValues(IBaseHasExtensions theBase, String theExtensionUrl) {
-		List<String> values = theBase.getExtension().stream()
+	public static List<String> getExtensionPrimitiveValues(IBase theBase, String theExtensionUrl) {
+		if (theBase instanceof IBaseHasExtensions) {
+			return ((IBaseHasExtensions) theBase)
+					.getExtension().stream()
					.filter(t -> theExtensionUrl.equals(t.getUrl()))
					.filter(t -> t.getValue() instanceof IPrimitiveType<?>)
					.map(t -> (IPrimitiveType<?>) t.getValue())
					.map(IPrimitiveType::getValueAsString)
					.filter(StringUtils::isNotBlank)
					.collect(Collectors.toList());
-		return values;
+		}
+		return Collections.emptyList();
 	}

 	/**
@@ -162,6 +162,16 @@ public class HapiExtensions {
 	 */
 	public static final String EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN =
 			"https://smilecdr.com/fhir/ns/StructureDefinition/searchparameter-uplift-refchain";

+	/**
+	 * This extension is used to enable auto version references at path for resource instances.
+	 * This extension should be of type <code>string</code> and should be
+	 * placed on the <code>Resource.meta</code> element.
+	 * It is allowed to add multiple extensions with different paths.
+	 */
+	public static final String EXTENSION_AUTO_VERSION_REFERENCES_AT_PATH =
+			"http://hapifhir.io/fhir/StructureDefinition/auto-version-references-at-path";
+
 	/**
 	 * This extension is used for "uplifted refchains" on search parameters. See the
 	 * HAPI FHIR documentation for an explanation of how these work.
@@ -35,6 +35,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;

 import static org.apache.commons.lang3.StringUtils.defaultString;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -144,4 +146,12 @@ public class MetaUtil {
 		}
 		sourceElement.setValueAsString(theValue);
 	}
+
+	public static Set<String> getAutoVersionReferencesAtPath(IBaseMetaType theMeta, String theResourceType) {
+		return ExtensionUtil.getExtensionPrimitiveValues(
+						theMeta, HapiExtensions.EXTENSION_AUTO_VERSION_REFERENCES_AT_PATH)
+				.stream()
+				.map(path -> String.format("%s.%s", theResourceType, path))
+				.collect(Collectors.toSet());
+	}
 }
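`getAutoVersionReferencesAtPath()` qualifies each extension value with the resource type, which is what `BaseParser` later matches against reference paths. A sketch under the assumption that the meta carries the extension valued `"focus"` (the `communication` variable is hypothetical):

```java
// Assuming communication.getMeta() carries auto-version-references-at-path = "focus":
Set<String> paths = MetaUtil.getAutoVersionReferencesAtPath(communication.getMeta(), "Communication");
// paths would contain "Communication.focus"
```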
@@ -0,0 +1,114 @@
+package ca.uhn.fhir.model.api;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
+public class BaseBatchJobParametersTest {
+
+	private static class TestParameters extends BaseBatchJobParameters {
+	}
+
+	private static class TestParam {
+		private final Object myTestValue;
+		private final boolean myExpectedToWork;
+
+		public TestParam(Object theValue, boolean theExpected) {
+			myTestValue = theValue;
+			myExpectedToWork = theExpected;
+		}
+
+		public Object getTestValue() {
+			return myTestValue;
+		}
+
+		public boolean isExpectedToWork() {
+			return myExpectedToWork;
+		}
+	}
+
+	private static List<TestParam> parameters() {
+		List<TestParam> params = new ArrayList<>();
+
+		// should pass
+		params.add(new TestParam("string", true));
+		params.add(new TestParam(1, true));
+		params.add(new TestParam(1.1f, true));
+		params.add(new TestParam(1.1d, true));
+		params.add(new TestParam(true, true));
+		params.add(new TestParam(-1, true));
+
+		// should not pass
+		params.add(new TestParam(List.of("strings"), false));
+		params.add(new TestParam(new Object(), false));
+
+		return params;
+	}
+
+	@ParameterizedTest
+	@MethodSource("parameters")
+	public void setUserData_acceptsStringNumberAndBooleansOnly(TestParam theParams) {
+		// setup
+		String key = "key";
+		TestParameters parameters = new TestParameters();
+		Object testValue = theParams.getTestValue();
+
+		// test
+		if (theParams.isExpectedToWork()) {
+			parameters.setUserData(key, testValue);
+			assertFalse(parameters.getUserData().isEmpty());
+			assertEquals(testValue, parameters.getUserData().get(key));
+		} else {
+			try {
+				parameters.setUserData(key, testValue);
+				fail();
+			} catch (IllegalArgumentException ex) {
+				String dataType = testValue.getClass().getName();
+				assertTrue(ex.getMessage().contains("Invalid data type provided " + dataType),
+					ex.getMessage());
+				assertTrue(parameters.getUserData().isEmpty());
+			}
+		}
+	}
+
+	@Test
+	public void setUserData_invalidKey_throws() {
+		// setup
+		TestParameters parameters = new TestParameters();
+
+		// test
+		for (String key : new String[] { null, "" }) {
+			try {
+				parameters.setUserData(key, "test");
+				fail();
+			} catch (IllegalArgumentException ex) {
+				assertTrue(ex.getMessage().contains("Invalid key; key must be non-empty, non-null"),
+					ex.getMessage());
+			}
+		}
+	}
+
+	@Test
+	public void setUserData_nullValue_removes() {
+		// setup
+		TestParameters parameters = new TestParameters();
+		String key = "key";
+
+		// test
+		parameters.setUserData(key, "test");
+		assertTrue(parameters.getUserData().containsKey(key));
+
+		parameters.setUserData(key, null);
+		assertFalse(parameters.getUserData().containsKey(key));
+	}
+}
@@ -4,7 +4,7 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>

 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>
@@ -12,7 +12,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-cli</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -0,0 +1,10 @@
+---
+type: add
+issue: 5527
+title: "Added a map of `additionalData` to BulkExport job params.
+  This will allow consumers of BulkExport to add additional
+  data to be accessed at later steps by using various pointcuts
+  in the system.
+  Updated ConsentService so that BulkExport operations will
+  call the willSeeResource method for each exported resource.
+  "
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 5588
+title: "Added the `auto-version-references-at-path` extension, which enables
+  automatic versioning of references at specified paths of resource instances."
@@ -0,0 +1,7 @@
+---
+type: fix
+title: "$everything queries with MDM expansion were bypassing the partition boundary by returning resources outside the partition.
+  Also, POST (as opposed to GET) $everything queries with MDM expansion did not apply the expansion flag.
+  The first issue was fixed by reversing the previous changes in [5493](https://github.com/hapifhir/hapi-fhir/issues/5493)
+  and by filtering source and golden resources by partition ID.
+  The second issue was fixed by correctly capturing the MDM expansion flag in POST $everything queries."
@@ -4,20 +4,24 @@
 title: "The version of a few dependencies have been bumped to the latest versions
 	(dependent HAPI modules listed in brackets):
 	<ul>
-	<li>Jackson (Base): 2.15.3 -> 2.16.0</li>
+	<li>Jackson (Base): 2.15.3 -> 2.16.1</li>
 	<li>SLF4j (Base): 2.0.3 -> 2.0.9</li>
 	<li>Logback (Base): 1.4.7 -> 1.4.14</li>
 	<li>Caffeine (Base): 3.1.1 -> 3.1.8</li>
 	<li>Spring Framework (JPA): 5.3.27 -> 6.1.1</li>
 	<li>Spring Boot (JPA-Starter): 5.3.27 -> 6.2.0</li>
 	<li>Spring Data BOM (JPA): 2021.2.2 -> 2023.1.0</li>
-	<li>Hibernate (JPA): 5.6.15.Final -> 6.4.0.Final</li>
+	<li>Hibernate (JPA): 5.6.15.Final -> 6.4.1.Final</li>
 	<li>Hibernate Validator (JPA): 6.1.5.Final -> 8.0.0.Final</li>
-	<li>Hibernate Search (JPA): 6.1.6.Final -> 6.2.2.Final</li>
+	<li>Hibernate Search (JPA): 6.1.6.Final -> 7.0.0.Final</li>
 	<li>Commons-DBCP2 (JPA): 2.9.0 -> 2.11.0</li>
 	<li>DataSource-Proxy (JPA): 1.9 -> 1.10</li>
 	<li>Spring Boot (Boot+Starter): 2.7.12 -> 3.1.4</li>
 	<li>Jetty (CLI): 10.0.14 -> 12.0.3</li>
 	<li>Jansi (CLI): 2.4.0 -> 2.4.1</li>
 	<li>Derby (CLI): 10.14.2.0 -> 10.17.1.0</li>
 	<li>Commons-Lang3 (CLI): 3.12.0 -> 3.14.0</li>
 	<li>Commons-CSV (CLI): 1.8 -> 1.10.0</li>
 	<li>Phloc Schematron (Schematron Validator): 5.6.5 -> 7.1.2</li>
 	<li>RestEasy (JAX-RS Server): 5.0.2.Final -> 6.2.5.Final</li>
 	</ul>"
@@ -166,3 +166,22 @@ You can also configure HAPI to not strip versions only on certain fields. This i
 ```java
 {{snippet:classpath:/ca/uhn/hapi/fhir/docs/Parser.java|disableStripVersionsField}}
 ```
+
+# Automatically Versioned References
+
+It is possible to configure HAPI to automatically version references for desired resource instances by providing the `auto-version-references-at-path` extension in the `Resource.meta` element:
+
+```json
+"meta": {
+  "extension": [
+    {
+      "url": "http://hapifhir.io/fhir/StructureDefinition/auto-version-references-at-path",
+      "valueString": "focus"
+    }
+  ]
+}
+```
+
+It is allowed to add multiple extensions with different paths. When a resource is stored, any references found at the specified paths will have the current version of the target appended, if a version is not already present.
+
+The parser will not strip versions from references at paths provided by the `auto-version-references-at-path` extension.
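For reference, the same extension can be attached programmatically before storing. A minimal R4 sketch (not taken from the diff; the `Communication` example mirrors the `"focus"` path above):

```java
Communication communication = new Communication();
communication.getMeta()
		.addExtension()
		.setUrl("http://hapifhir.io/fhir/StructureDefinition/auto-version-references-at-path")
		.setValue(new StringType("focus"));
// On storage, the reference at Communication.focus gains the target's current
// version unless one is already present.
```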
@@ -11,7 +11,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -53,7 +53,7 @@
 </dependency>
 <dependency>
 	<groupId>org.hibernate.search</groupId>
-	<artifactId>hibernate-search-mapper-orm-orm6</artifactId>
+	<artifactId>hibernate-search-mapper-orm</artifactId>
 	<exclusions>
 		<exclusion>
 			<groupId>org.apache.logging.log4j</groupId>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.11.8-SNAPSHOT</version>
+<version>6.11.10-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -340,11 +340,7 @@
 </dependency>
 <dependency>
 	<groupId>org.apache.lucene</groupId>
-	<artifactId>lucene-analyzers-phonetic</artifactId>
-</dependency>
-<dependency>
-	<groupId>org.apache.lucene</groupId>
-	<artifactId>lucene-backward-codecs</artifactId>
+	<artifactId>lucene-analysis-phonetic</artifactId>
 </dependency>
 <!-- Misc -->
 <dependency>
@@ -111,7 +111,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {

 	@SuppressWarnings("rawtypes")
 	@Autowired
-	private IMdmLinkDao myMdmLinkDao;
+	protected IMdmLinkDao myMdmLinkDao;

 	@Autowired
 	private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
@@ -31,6 +31,7 @@ import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.SortOrderEnum;
 import ca.uhn.fhir.rest.api.SortSpec;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.rest.param.StringParam;
 import ca.uhn.fhir.rest.param.TokenParam;
 import ca.uhn.fhir.rest.param.UriParam;
@@ -172,7 +173,7 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 		}
 		IBundleProvider search = myDaoRegistry
 				.getResourceDao("StructureDefinition")
-				.search(new SearchParameterMap().setLoadSynchronousUpTo(1000));
+				.search(new SearchParameterMap().setLoadSynchronousUpTo(1000), new SystemRequestDetails());
 		return (List<T>) search.getResources(0, 1000);
 	}
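Passing an explicit `SystemRequestDetails` (rather than no request object) marks the lookup as a system-level call, giving partition-aware interceptors a request to inspect. The pattern in isolation, using only the names from the hunk above:

```java
// Assumed intent: system-level searches carry SystemRequestDetails so partition
// resolution is not left with a null request.
SearchParameterMap map = new SearchParameterMap().setLoadSynchronousUpTo(1000);
IBundleProvider result = myDaoRegistry
		.getResourceDao("StructureDefinition")
		.search(map, new SystemRequestDetails());
```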
@@ -46,6 +46,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.transaction.annotation.Propagation;
 import org.springframework.transaction.annotation.Transactional;

-import java.util.Arrays;
 import java.util.Collections;

 public class JpaResourceDaoPatient<T extends IBaseResource> extends BaseHapiFhirResourceDao<T>
@@ -67,6 +66,7 @@ public class JpaResourceDaoPatient<T extends IBaseResource> extends BaseHapiFhir
 			StringAndListParam theNarrative,
 			StringAndListParam theFilter,
 			StringAndListParam theTypes,
+			boolean theMdmExpand,
 			RequestDetails theRequest) {
 		SearchParameterMap paramMap = new SearchParameterMap();
 		if (theCount != null) {
@@ -95,12 +95,9 @@ public class JpaResourceDaoPatient<T extends IBaseResource> extends BaseHapiFhir
 		paramMap.setSort(theSort);
 		paramMap.setLastUpdated(theLastUpdated);
 		if (theIds != null) {
-			if (theRequest.getParameters().containsKey("_mdm")) {
-				String[] paramVal = theRequest.getParameters().get("_mdm");
-				if (Arrays.asList(paramVal).contains("true")) {
+			if (theMdmExpand) {
 				theIds.getValuesAsQueryTokens().forEach(param -> param.setMdmExpand(true));
-				}
 			}
 			paramMap.add("_id", theIds);
 		}

@@ -161,6 +158,7 @@ public class JpaResourceDaoPatient<T extends IBaseResource> extends BaseHapiFhir
 				theQueryParams.getNarrative(),
 				theQueryParams.getFilter(),
 				theQueryParams.getTypes(),
+				theQueryParams.getMdmExpand(),
 				theRequestDetails);
 	}

@@ -181,6 +179,7 @@ public class JpaResourceDaoPatient<T extends IBaseResource> extends BaseHapiFhir
 				theQueryParams.getNarrative(),
 				theQueryParams.getFilter(),
 				theQueryParams.getTypes(),
+				theQueryParams.getMdmExpand(),
 				theRequestDetails);
 	}
 }
@@ -57,15 +57,21 @@ public interface IMdmLinkJpaRepository
 			nativeQuery = true)
 	void deleteLinksHistoryWithAnyReferenceToPids(@Param("goldenPids") List<Long> theResourcePids);

-	@Query("SELECT ml2.myGoldenResourcePid as goldenPid, ml2.mySourcePid as sourcePid FROM MdmLink ml2 "
-			+ "WHERE ml2.myMatchResult=:matchResult "
-			+ "AND ml2.myGoldenResourcePid IN ("
-			+ "SELECT ml.myGoldenResourcePid FROM MdmLink ml "
-			+ "INNER JOIN ResourceLink hrl "
-			+ "ON hrl.myTargetResourcePid=ml.mySourcePid "
-			+ "AND hrl.mySourceResourcePid=:groupPid "
-			+ "AND hrl.mySourcePath='Group.member.entity' "
-			+ "AND hrl.myTargetResourceType='Patient'"
+	// TODO: LD: the calling code in JpaBulkExportProcessor doesn't yet leverage the partition IDs, but maybe it
+	// should?
+	@Query(
+			"SELECT lookup_links.myGoldenResourcePid as goldenPid, gld_rt.myPartitionIdValue as goldenPartitionId, lookup_links.mySourcePid as sourcePid, lookup_links.myPartitionIdValue as sourcePartitionId "
+					+ "FROM MdmLink lookup_links "
+					+ "INNER JOIN ResourceTable gld_rt "
+					+ "on lookup_links.myGoldenResourcePid=gld_rt.myId "
+					+ "WHERE lookup_links.myMatchResult=:matchResult "
+					+ "AND lookup_links.myGoldenResourcePid IN ("
+					+ "SELECT inner_mdm_link.myGoldenResourcePid FROM MdmLink inner_mdm_link "
+					+ "INNER JOIN ResourceLink inner_res_link "
+					+ "ON inner_res_link.myTargetResourcePid=inner_mdm_link.mySourcePid "
+					+ "AND inner_res_link.mySourceResourcePid=:groupPid "
+					+ "AND inner_res_link.mySourcePath='Group.member.entity' "
+					+ "AND inner_res_link.myTargetResourceType='Patient'"
 			+ ")")
 	List<MdmPidTuple> expandPidsFromGroupPidGivenMatchResult(
 			@Param("groupPid") Long theGroupPid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);
@@ -77,15 +83,23 @@ public interface IMdmLinkJpaRepository
 	interface MdmPidTuple {
 		Long getGoldenPid();

+		Integer getGoldenPartitionId();
+
 		Long getSourcePid();

+		Integer getSourcePartitionId();
 	}

-	@Query("SELECT ml.myGoldenResourcePid as goldenPid, ml.mySourcePid as sourcePid " + "FROM MdmLink ml "
-			+ "INNER JOIN MdmLink ml2 "
-			+ "on ml.myGoldenResourcePid=ml2.myGoldenResourcePid "
-			+ "WHERE ml2.mySourcePid=:sourcePid "
-			+ "AND ml2.myMatchResult=:matchResult "
-			+ "AND ml.myMatchResult=:matchResult")
+	@Query(
+			"SELECT lookup_link.myGoldenResourcePid as goldenPid, gld_rt.myPartitionIdValue as goldenPartitionId, lookup_link.mySourcePid as sourcePid, lookup_link.myPartitionIdValue as sourcePartitionId "
+					+ "FROM MdmLink lookup_link "
+					+ "INNER JOIN MdmLink gld_link "
+					+ "on lookup_link.myGoldenResourcePid=gld_link.myGoldenResourcePid "
+					+ "INNER JOIN ResourceTable gld_rt "
+					+ "on gld_link.myGoldenResourcePid=gld_rt.myId "
+					+ "WHERE gld_link.mySourcePid=:sourcePid "
+					+ "AND gld_link.myMatchResult=:matchResult "
+					+ "AND lookup_link.myMatchResult=:matchResult")
 	List<MdmPidTuple> expandPidsBySourcePidAndMatchResult(
 			@Param("sourcePid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);

@@ -99,7 +113,12 @@ public interface IMdmLinkJpaRepository
 			@Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnumToExclude);

 	@Query(
-			"SELECT ml.myGoldenResourcePid as goldenPid, ml.mySourcePid as sourcePid FROM MdmLink ml WHERE ml.myGoldenResourcePid = :goldenPid and ml.myMatchResult = :matchResult")
+			"SELECT lookup_link.myGoldenResourcePid as goldenPid, gld_rt.myPartitionIdValue as goldenPartitionId, lookup_link.mySourcePid as sourcePid, lookup_link.myPartitionIdValue as sourcePartitionId "
+					+ "FROM MdmLink lookup_link "
+					+ "INNER JOIN ResourceTable gld_rt "
+					+ "on lookup_link.myGoldenResourcePid=gld_rt.myId "
+					+ "WHERE lookup_link.myGoldenResourcePid = :goldenPid "
+					+ "AND lookup_link.myMatchResult = :matchResult")
 	List<MdmPidTuple> expandPidsByGoldenResourcePidAndMatchResult(
 			@Param("goldenPid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);
@@ -59,6 +59,8 @@ import org.hl7.fhir.instance.model.api.IAnyResource;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.IdType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
+import org.springframework.transaction.support.TransactionSynchronizationManager;

@@ -98,6 +100,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
  */
 @Service
 public class IdHelperService implements IIdHelperService<JpaPid> {
+	private static final Logger ourLog = LoggerFactory.getLogger(IdHelperService.class);
 	public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0];
 	public static final String RESOURCE_PID = "RESOURCE_PID";

@@ -210,9 +213,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 		Validate.isTrue(!theIds.isEmpty(), "theIds must not be empty");

 		Map<String, JpaPid> retVals = new HashMap<>();
-		RequestPartitionId partitionId = myPartitionSettings.isAllowUnqualifiedCrossPartitionReference()
-				? RequestPartitionId.allPartitions()
-				: theRequestPartitionId;
 		for (String id : theIds) {
 			JpaPid retVal;
 			if (!idRequiresForcedId(id)) {
@@ -223,17 +223,18 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 				// is a forced id
 				// we must resolve!
 				if (myStorageSettings.isDeleteEnabled()) {
-					retVal = resolveResourceIdentity(partitionId, theResourceType, id, theExcludeDeleted)
+					retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theExcludeDeleted)
 							.getPersistentId();
 					retVals.put(id, retVal);
 				} else {
 					// fetch from cache... adding to cache if not available
-					String key = toForcedIdToPidKey(partitionId, theResourceType, id);
+					String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, id);
 					retVal = myMemoryCacheService.getThenPutAfterCommit(
 							MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, t -> {
 								List<IIdType> ids = Collections.singletonList(new IdType(theResourceType, id));
 								// fetches from cache using a function that checks cache first...
-								List<JpaPid> resolvedIds = resolveResourcePersistentIdsWithCache(partitionId, ids);
+								List<JpaPid> resolvedIds =
+										resolveResourcePersistentIdsWithCache(theRequestPartitionId, ids);
 								if (resolvedIds.isEmpty()) {
 									throw new ResourceNotFoundException(Msg.code(1100) + ids.get(0));
 								}
@@ -581,7 +582,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 		return retVal;
 	}

-	RequestPartitionId replaceDefault(RequestPartitionId theRequestPartitionId) {
+	public RequestPartitionId replaceDefault(RequestPartitionId theRequestPartitionId) {
 		if (myPartitionSettings.getDefaultPartitionId() != null) {
 			if (!theRequestPartitionId.isAllPartitions() && theRequestPartitionId.hasDefaultPartitionId()) {
 				List<Integer> partitionIds = theRequestPartitionId.getPartitionIds().stream()
@@ -711,7 +712,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 	}

 	@VisibleForTesting
-	void setPartitionSettingsForUnitTest(PartitionSettings thePartitionSettings) {
+	public void setPartitionSettingsForUnitTest(PartitionSettings thePartitionSettings) {
 		myPartitionSettings = thePartitionSettings;
 	}
@@ -120,8 +120,11 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao<JpaPid, MdmLink> {
 	}

 	private MdmPidTuple<JpaPid> daoTupleToMdmTuple(IMdmLinkJpaRepository.MdmPidTuple theMdmPidTuple) {
-		return MdmPidTuple.fromGoldenAndSource(
-				JpaPid.fromId(theMdmPidTuple.getGoldenPid()), JpaPid.fromId(theMdmPidTuple.getSourcePid()));
+		return MdmPidTuple.fromGoldenAndSourceAndPartitionIds(
+				JpaPid.fromId(theMdmPidTuple.getGoldenPid()),
+				theMdmPidTuple.getGoldenPartitionId(),
+				JpaPid.fromId(theMdmPidTuple.getSourcePid()),
+				theMdmPidTuple.getSourcePartitionId());
 	}

 	@Override
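The enriched factory carries a partition ID alongside each PID; on non-partitioned servers those IDs are simply null, as the updated `createTuple` test helper further down also shows. A sketch using only the signature from this hunk:

```java
// Partition IDs may be null when partitioning is disabled (see the test change below).
MdmPidTuple<JpaPid> tuple = MdmPidTuple.fromGoldenAndSourceAndPartitionIds(
		JpaPid.fromId(10L), 1,  // golden PID and its partition
		JpaPid.fromId(20L), 1); // source PID and its partition
```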
@@ -112,6 +112,11 @@ public abstract class BaseJpaResourceProviderPatient<T extends IBaseResource> ex
 					max = OperationParam.MAX_UNLIMITED,
 					typeName = "string")
 			List<IPrimitiveType<String>> theTypes,
+			@Description(
+					shortDefinition =
+							"Filter the resources to return only resources matching the given _type filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)")
+			@OperationParam(name = Constants.PARAM_MDM, min = 0, max = 1, typeName = "boolean")
+			IPrimitiveType<Boolean> theMdmExpand,
 			@Sort SortSpec theSortSpec,
 			RequestDetails theRequestDetails) {

@@ -126,6 +131,7 @@ public abstract class BaseJpaResourceProviderPatient<T extends IBaseResource> ex
 		everythingParams.setNarrative(toStringAndList(theNarrative));
 		everythingParams.setFilter(toStringAndList(theFilter));
 		everythingParams.setTypes(toStringAndList(theTypes));
+		everythingParams.setMdmExpand(resolveNullValue(theMdmExpand));

 		return ((IFhirResourceDaoPatient<?>) getDao())
 				.patientInstanceEverything(theServletRequest, theRequestDetails, everythingParams, theId);
@@ -202,6 +208,11 @@ public abstract class BaseJpaResourceProviderPatient<T extends IBaseResource> ex
 					max = OperationParam.MAX_UNLIMITED,
 					typeName = "id")
 			List<IIdType> theId,
+			@Description(
+					shortDefinition =
+							"Filter the resources to return only resources matching the given _type filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)")
+			@OperationParam(name = Constants.PARAM_MDM, min = 0, max = 1, typeName = "boolean")
+			IPrimitiveType<Boolean> theMdmExpand,
 			@Sort SortSpec theSortSpec,
 			RequestDetails theRequestDetails) {

@@ -216,6 +227,7 @@ public abstract class BaseJpaResourceProviderPatient<T extends IBaseResource> ex
 		everythingParams.setNarrative(toStringAndList(theNarrative));
 		everythingParams.setFilter(toStringAndList(theFilter));
 		everythingParams.setTypes(toStringAndList(theTypes));
+		everythingParams.setMdmExpand(resolveNullValue(theMdmExpand));

 		return ((IFhirResourceDaoPatient<?>) getDao())
 				.patientTypeEverything(
@@ -261,4 +273,8 @@ public abstract class BaseJpaResourceProviderPatient<T extends IBaseResource> ex
 		}
 		return retVal;
 	}
+
+	private boolean resolveNullValue(IPrimitiveType<Boolean> theMdmExpand) {
+		return theMdmExpand == null ? Boolean.FALSE : theMdmExpand.getValue();
+	}
 }
@@ -71,7 +71,7 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
 	private JpaStorageSettings myStorageSettings;

 	@Autowired
-	private SearchBuilderFactory mySearchBuilderFactory;
+	protected SearchBuilderFactory mySearchBuilderFactory;

 	@Autowired
 	private DaoRegistry myDaoRegistry;
@@ -98,14 +98,13 @@ import jakarta.persistence.EntityManager;
 import jakarta.persistence.NonUniqueResultException;
 import jakarta.persistence.PersistenceContext;
 import jakarta.persistence.PersistenceContextType;
-import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.collections4.ListUtils;
 import org.apache.commons.lang3.ObjectUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.time.DateUtils;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.hibernate.CacheMode;
 import org.hibernate.search.engine.search.predicate.dsl.BooleanPredicateClausesStep;
 import org.hibernate.search.engine.search.predicate.dsl.PredicateFinalStep;
@@ -175,6 +174,7 @@ import java.util.StringTokenizer;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Consumer;
+import java.util.function.Supplier;
 import java.util.stream.Collectors;

 import static ca.uhn.fhir.jpa.entity.TermConceptPropertyBinder.CONCEPT_PROPERTY_PREFIX_NAME;
@@ -1135,39 +1135,51 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 		}
 		int chunkSize = chunkSizeOpt.get();

-		SearchProperties searchProps = buildSearchScroll(
+		/*
+		 * Turn the filter into one or more Hibernate Search queries. Ideally we want it
+		 * to be handled by a single query, but Lucene/ES don't like it when we exceed
+		 * 1024 different terms in a single query. So if we have that many terms (which
+		 * can happen if a ValueSet has a lot of explicitly enumerated codes that it's
+		 * including) we split this into multiple searches. The method below builds these
+		 * searches lazily, returning a Supplier that creates and executes the search
+		 * when it's actually time to.
+		 */
+		SearchProperties searchProps = buildSearchScrolls(
 				theTermCodeSystemVersion,
 				theExpansionFilter,
 				theSystem,
 				theIncludeOrExclude,
 				chunkSize,
 				includeOrExcludeVersion);

 		int accumulatedBatchesSoFar = 0;
-		try (SearchScroll<EntityReference> scroll = searchProps.getSearchScroll()) {
+		for (var next : searchProps.getSearchScroll()) {
+			try (SearchScroll<EntityReference> scroll = next.get()) {

 				ourLog.debug(
 						"Beginning batch expansion for {} with max results per batch: {}",
 						(theAdd ? "inclusion" : "exclusion"),
 						chunkSize);
-			for (SearchScrollResult<EntityReference> chunk = scroll.next(); chunk.hasHits(); chunk = scroll.next()) {
+				for (SearchScrollResult<EntityReference> chunk = scroll.next();
+						chunk.hasHits();
+						chunk = scroll.next()) {
 					int countForBatch = 0;

-				List<Long> pids = chunk.hits().stream().map(t -> (Long) t.id()).collect(Collectors.toList());
+					List<Long> pids =
+							chunk.hits().stream().map(t -> (Long) t.id()).collect(Collectors.toList());

 					List<TermConcept> termConcepts = myTermConceptDao.fetchConceptsAndDesignationsByPid(pids);

 					// If the include section had multiple codes, return the codes in the same order
 					termConcepts = sortTermConcepts(searchProps, termConcepts);

-				// int firstResult = theQueryIndex * maxResultsPerBatch;// TODO GGG HS we lose the ability to check the
+					// int firstResult = theQueryIndex * maxResultsPerBatch;
+					// TODO GGG HS we lose the ability to check the
 					// index of the first result, so just best-guessing it here.
-				Optional<PredicateFinalStep> expansionStepOpt = searchProps.getExpansionStepOpt();
 					int delta = 0;
 					for (TermConcept concept : termConcepts) {
 						count++;
 						countForBatch++;
-					if (theAdd && expansionStepOpt.isPresent()) {
+						if (theAdd && searchProps.hasIncludeOrExcludeCodes()) {
 							ValueSet.ConceptReferenceComponent theIncludeConcept =
 									getMatchedConceptIncludedInValueSet(theIncludeOrExclude, concept);
 							if (theIncludeConcept != null && isNotBlank(theIncludeConcept.getDisplay())) {
@@ -1208,6 +1220,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 						fullOperationSw.getMillis());
 			}
 		}
+	}

 	private List<TermConcept> sortTermConcepts(SearchProperties searchProps, List<TermConcept> termConcepts) {
 		List<String> codes = searchProps.getIncludeOrExcludeCodes();
@@ -1243,7 +1256,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 		return maxResultsPerBatch > 0 ? Optional.of(maxResultsPerBatch) : Optional.empty();
 	}

-	private SearchProperties buildSearchScroll(
+	private SearchProperties buildSearchScrolls(
 			TermCodeSystemVersion theTermCodeSystemVersion,
 			ExpansionFilter theExpansionFilter,
 			String theSystem,
@@ -1255,15 +1268,39 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 		SearchPredicateFactory predicate =
 				searchSession.scope(TermConcept.class).predicate();

+		List<String> allCodes = theIncludeOrExclude.getConcept().stream()
+				.filter(Objects::nonNull)
+				.map(ValueSet.ConceptReferenceComponent::getCode)
+				.filter(StringUtils::isNotBlank)
+				.collect(Collectors.toList());
+		SearchProperties returnProps = new SearchProperties();
+		returnProps.setIncludeOrExcludeCodes(allCodes);
+
+		/*
+		 * Lucene/ES can't typically handle more than 1024 clauses per search, so if
+		 * we have more than that number (e.g. because of a ValueSet that explicitly
+		 * includes thousands of codes), we break this up into multiple searches.
+		 */
+		List<List<String>> partitionedCodes = ListUtils.partition(allCodes, IndexSearcher.getMaxClauseCount() - 10);
+		if (partitionedCodes.isEmpty()) {
+			partitionedCodes = List.of(List.of());
+		}
+
+		for (List<String> nextCodePartition : partitionedCodes) {
+			Supplier<SearchScroll<EntityReference>> nextScroll = () -> {
 				// Build the top-level expansion on filters.
 				PredicateFinalStep step = predicate.bool(b -> {
-			b.must(predicate.match().field("myCodeSystemVersionPid").matching(theTermCodeSystemVersion.getPid()));
+					b.must(predicate
+							.match()
+							.field("myCodeSystemVersionPid")
+							.matching(theTermCodeSystemVersion.getPid()));

 					if (theExpansionFilter.hasCode()) {
 						b.must(predicate.match().field("myCode").matching(theExpansionFilter.getCode()));
 					}

-			String codeSystemUrlAndVersion = buildCodeSystemUrlAndVersion(theSystem, theIncludeOrExcludeVersion);
+					String codeSystemUrlAndVersion =
+							buildCodeSystemUrlAndVersion(theSystem, theIncludeOrExcludeVersion);
 					for (ValueSet.ConceptSetFilterComponent nextFilter : theIncludeOrExclude.getFilter()) {
 						handleFilter(codeSystemUrlAndVersion, predicate, b, nextFilter);
 					}
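The clause-limit workaround in isolation: the enumerated codes are chunked below Lucene's clause limit, and each chunk later becomes its own scroll query. Both calls used here appear in the hunk above; the sample values are assumed:

```java
// IndexSearcher.getMaxClauseCount() defaults to 1024 in recent Lucene versions;
// the -10 leaves headroom for the non-code clauses added to the same query.
List<String> allCodes = List.of("code-1", "code-2"); // imagine thousands of codes
int chunkSize = IndexSearcher.getMaxClauseCount() - 10;
List<List<String>> chunks = ListUtils.partition(allCodes, chunkSize);
```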
@@ -1272,29 +1309,14 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 					}
 				});

-		SearchProperties returnProps = new SearchProperties();
-
-		List<String> codes = theIncludeOrExclude.getConcept().stream()
-				.filter(Objects::nonNull)
-				.map(ValueSet.ConceptReferenceComponent::getCode)
-				.filter(StringUtils::isNotBlank)
-				.collect(Collectors.toList());
-		returnProps.setIncludeOrExcludeCodes(codes);
-
-		Optional<PredicateFinalStep> expansionStepOpt = buildExpansionPredicate(codes, predicate);
-		final PredicateFinalStep finishedQuery =
-				expansionStepOpt.isPresent() ? predicate.bool().must(step).must(expansionStepOpt.get()) : step;
-		returnProps.setExpansionStepOpt(expansionStepOpt);
-
-		/*
-		 * DM 2019-08-21 - Processing slows after any ValueSets with many codes explicitly identified. This might
-		 * be due to the dark arts that is memory management. Will monitor but not do anything about this right now.
-		 */
-
-		// BooleanQuery.setMaxClauseCount(SearchBuilder.getMaximumPageSize());
-		// TODO GGG HS looks like we can't set max clause count, but it can be set server side.
-		// BooleanQuery.setMaxClauseCount(10000);
-		// JM 22-02-15 - Hopefully increasing maxClauseCount should be not needed anymore
+				// Add a selector on any explicitly enumerated codes in the VS component
+				final PredicateFinalStep finishedQuery;
+				if (nextCodePartition.isEmpty()) {
+					finishedQuery = step;
+				} else {
+					PredicateFinalStep expansionStep = buildExpansionPredicate(nextCodePartition, predicate);
+					finishedQuery = predicate.bool().must(step).must(expansionStep);
+				}

 				SearchQuery<EntityReference> termConceptsQuery = searchSession
 						.search(TermConcept.class)
@@ -1302,7 +1324,12 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 						.where(f -> finishedQuery)
 						.toQuery();

-		returnProps.setSearchScroll(termConceptsQuery.scroll(theScrollChunkSize));
+				return termConceptsQuery.scroll(theScrollChunkSize);
+			};
+
+			returnProps.addSearchScroll(nextScroll);
+		}
+
 		return returnProps;
 	}
@@ -1317,29 +1344,9 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 	/**
 	 * Helper method which builds a predicate for the expansion
 	 */
-	private Optional<PredicateFinalStep> buildExpansionPredicate(
-			List<String> theCodes, SearchPredicateFactory thePredicate) {
-		if (CollectionUtils.isEmpty(theCodes)) {
-			return Optional.empty();
-		}
-
-		if (theCodes.size() < BooleanQuery.getMaxClauseCount()) {
-			return Optional.of(thePredicate.simpleQueryString().field("myCode").matching(String.join(" | ", theCodes)));
-		}
-
-		// Number of codes is larger than maxClauseCount, so we split the query in several clauses
-
-		// partition codes in lists of BooleanQuery.getMaxClauseCount() size
-		List<List<String>> listOfLists = ListUtils.partition(theCodes, BooleanQuery.getMaxClauseCount());
-
-		PredicateFinalStep step = thePredicate.bool(b -> {
-			b.minimumShouldMatchNumber(1);
-			for (List<String> codeList : listOfLists) {
-				b.should(p -> p.simpleQueryString().field("myCode").matching(String.join(" | ", codeList)));
-			}
-		});
-
-		return Optional.of(step);
+	private PredicateFinalStep buildExpansionPredicate(List<String> theCodes, SearchPredicateFactory thePredicate) {
+		assert !theCodes.isEmpty();
+		return thePredicate.simpleQueryString().field("myCode").matching(String.join(" | ", theCodes));
 	}

 	private String buildCodeSystemUrlAndVersion(String theSystem, String theIncludeOrExcludeVersion) {
@@ -3150,24 +3157,15 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 	 * Properties returned from method buildSearchScroll
 	 */
 	private static final class SearchProperties {
-		private SearchScroll<EntityReference> mySearchScroll;
-		private Optional<PredicateFinalStep> myExpansionStepOpt;
+		private List<Supplier<SearchScroll<EntityReference>>> mySearchScroll = new ArrayList<>();
 		private List<String> myIncludeOrExcludeCodes;

-		public SearchScroll<EntityReference> getSearchScroll() {
+		public List<Supplier<SearchScroll<EntityReference>>> getSearchScroll() {
 			return mySearchScroll;
 		}

-		public void setSearchScroll(SearchScroll<EntityReference> theSearchScroll) {
-			mySearchScroll = theSearchScroll;
-		}
-
-		public Optional<PredicateFinalStep> getExpansionStepOpt() {
-			return myExpansionStepOpt;
-		}
-
-		public void setExpansionStepOpt(Optional<PredicateFinalStep> theExpansionStepOpt) {
-			myExpansionStepOpt = theExpansionStepOpt;
+		public void addSearchScroll(Supplier<SearchScroll<EntityReference>> theSearchScrollSupplier) {
+			mySearchScroll.add(theSearchScrollSupplier);
 		}

 		public List<String> getIncludeOrExcludeCodes() {
@@ -3177,6 +3175,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 		public void setIncludeOrExcludeCodes(List<String> theIncludeOrExcludeCodes) {
 			myIncludeOrExcludeCodes = theIncludeOrExcludeCodes;
 		}
+
+		public boolean hasIncludeOrExcludeCodes() {
+			return !myIncludeOrExcludeCodes.isEmpty();
+		}
 	}

 	static boolean isValueSetDisplayLanguageMatch(ValueSetExpansionOptions theExpansionOptions, String theStoredLang) {
@@ -35,6 +35,7 @@ import org.hl7.fhir.r4.model.Group;
 import org.hl7.fhir.r4.model.IdType;
 import org.hl7.fhir.r4.model.Observation;
 import org.hl7.fhir.r4.model.Patient;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.junit.jupiter.params.ParameterizedTest;
@@ -152,6 +153,12 @@ public class JpaBulkExportProcessorTest {
 	@InjectMocks
 	private JpaBulkExportProcessor myProcessor;

+	@BeforeEach
+	public void init() {
+		myProcessor.mySearchBuilderFactory = mySearchBuilderFactory;
+		myProcessor.myMdmLinkDao = myMdmLinkDao;
+	}
+
 	private ExportPIDIteratorParameters createExportParameters(BulkExportJobParameters.ExportStyle theExportStyle) {
 		ExportPIDIteratorParameters parameters = new ExportPIDIteratorParameters();
 		parameters.setInstanceId("instanceId");
@@ -176,7 +183,7 @@ public class JpaBulkExportProcessorTest {
 	}

 	private MdmPidTuple<JpaPid> createTuple(long theGroupId, long theGoldenId) {
-		return MdmPidTuple.fromGoldenAndSource(JpaPid.fromId(theGoldenId), JpaPid.fromId(theGroupId));
+		return MdmPidTuple.fromGoldenAndSourceAndPartitionIds(JpaPid.fromId(theGoldenId), null, JpaPid.fromId(theGroupId), null);
 	}

 	@ParameterizedTest
@@ -8,11 +8,11 @@ import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.util.MemoryCacheService;
+import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import jakarta.persistence.EntityManager;
-import jakarta.persistence.Tuple;
-import jakarta.persistence.TypedQuery;
-import jakarta.persistence.criteria.*;
-import org.hl7.fhir.r4.hapi.ctx.FhirR4;
+import jakarta.persistence.criteria.Path;
+import jakarta.persistence.criteria.Root;
 import org.hl7.fhir.r4.model.Patient;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -22,9 +22,6 @@ import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;

-import java.time.Instant;
-import java.util.Collections;
-import java.util.Date;
 import java.util.List;
 import java.util.Map;
@@ -62,13 +59,12 @@ public class IdHelperServiceTest {

 		when(myStorageSettings.isDeleteEnabled()).thenReturn(true);
 		when(myStorageSettings.getResourceClientIdStrategy()).thenReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY);
-		when(myPartitionSettings.isAllowUnqualifiedCrossPartitionReference()).thenReturn(true);
 	}

 	@Test
 	public void testResolveResourcePersistentIds() {
 		//prepare params
-		RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionName("Partition-A");
+		RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A");
 		String resourceType = "Patient";
 		Long id = 123L;
 		List<String> ids = List.of(String.valueOf(id));
@@ -77,25 +73,18 @@ public class IdHelperServiceTest {
 		//prepare results
 		Patient expectedPatient = new Patient();
 		expectedPatient.setId(ids.get(0));
-		Object[] obj = new Object[] {resourceType, Long.parseLong(ids.get(0)), ids.get(0), Date.from(Instant.now())};

 		// configure mock behaviour
-		when(myStorageSettings.isDeleteEnabled()).thenReturn(true);
-		when(myResourceTableDao
-			.findAndResolveByForcedIdWithNoType(eq(resourceType), eq(ids), eq(theExcludeDeleted)))
-			.thenReturn(Collections.singletonList(obj));
-
-		Map<String, JpaPid> actualIds = subject.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, theExcludeDeleted);
-
-		//verify results
-		assertFalse(actualIds.isEmpty());
-		assertEquals(id, actualIds.get(ids.get(0)).getId());
+		final ResourceNotFoundException resourceNotFoundException = assertThrows(ResourceNotFoundException.class, () -> subject.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, theExcludeDeleted));
+		assertEquals("HAPI-2001: Resource Patient/123 is not known", resourceNotFoundException.getMessage());
 	}

 	@Test
 	public void testResolveResourcePersistentIdsDeleteFalse() {
 		//prepare Params
-		RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionName("Partition-A");
+		RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A");
 		Long id = 123L;
 		String resourceType = "Patient";
 		List<String> ids = List.of(String.valueOf(id));
@ -107,54 +96,21 @@ public class IdHelperServiceTest {
|
|||
expectedPatient.setId(ids.get(0));
|
||||
|
||||
// configure mock behaviour
|
||||
configureCacheBehaviour(forcedId);
|
||||
configureEntityManagerBehaviour(id, resourceType, ids.get(0));
|
||||
when(myStorageSettings.isDeleteEnabled()).thenReturn(false);
|
||||
when(myFhirCtx.getVersion()).thenReturn(new FhirR4());
|
||||
|
||||
Map<String, JpaPid> actualIds = subject.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, theExcludeDeleted);
|
||||
|
||||
//verifyResult
|
||||
assertFalse(actualIds.isEmpty());
|
||||
assertEquals(id, actualIds.get(ids.get(0)).getId());
|
||||
assertNull(actualIds.get(ids.get(0)));
|
||||
}
|
||||
|
||||
private void configureCacheBehaviour(String resourceUrl) {
|
||||
when(myMemoryCacheService.getThenPutAfterCommit(eq(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID), eq(resourceUrl), any())).thenCallRealMethod();
|
||||
doNothing().when(myMemoryCacheService).putAfterCommit(eq(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID), eq(resourceUrl), ArgumentMatchers.<JpaPid>any());
|
||||
when(myMemoryCacheService.getIfPresent(eq(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID), eq(resourceUrl))).thenReturn(null);
|
||||
}
|
||||
|
||||
private void configureEntityManagerBehaviour(Long idNumber, String resourceType, String id) {
|
||||
List<Tuple> mockedTupleList = getMockedTupleList(idNumber, resourceType, id);
|
||||
CriteriaBuilder builder = getMockedCriteriaBuilder();
|
||||
Root<ResourceTable> from = getMockedFrom();
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
TypedQuery<Tuple> query = (TypedQuery<Tuple>) mock(TypedQuery.class);
|
||||
@SuppressWarnings("unchecked")
|
||||
CriteriaQuery<Tuple> cq = mock(CriteriaQuery.class);
|
||||
|
||||
when(builder.createTupleQuery()).thenReturn(cq);
|
||||
when(cq.from(ArgumentMatchers.<Class<ResourceTable>>any())).thenReturn(from);
|
||||
when(query.getResultList()).thenReturn(mockedTupleList);
|
||||
|
||||
when(myEntityManager.getCriteriaBuilder()).thenReturn(builder);
|
||||
when(myEntityManager.createQuery(ArgumentMatchers.<CriteriaQuery<Tuple>>any())).thenReturn(query);
|
||||
}
|
||||
|
||||
private CriteriaBuilder getMockedCriteriaBuilder() {
|
||||
Predicate pred = mock(Predicate.class);
|
||||
CriteriaBuilder builder = mock(CriteriaBuilder.class);
|
||||
lenient().when(builder.equal(any(), any())).thenReturn(pred);
|
||||
return builder;
|
||||
}
|
||||
private Root<ResourceTable> getMockedFrom() {
|
||||
@SuppressWarnings("unchecked")
|
||||
Path<Object> path = mock(Path.class);
|
||||
@SuppressWarnings("unchecked")
|
||||
Root<ResourceTable> from = mock(Root.class);
|
||||
lenient().when(from.get(ArgumentMatchers.<String>any())).thenReturn(path);
|
||||
when(from.get(ArgumentMatchers.<String>any())).thenReturn(path);
|
||||
return from;
|
||||
}
|
||||
|
||||
|
|
|
@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -3,7 +3,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -3,7 +3,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -229,7 +229,6 @@ public class MdmSearchExpandingInterceptorIT extends BaseMdmR4Test {
    String id = expectedIds.get(0);

    HashMap<String, String[]> queryParameters = new HashMap<>();
    queryParameters.put("_mdm", new String[]{"true"});

    HttpServletRequest req = Mockito.mock(HttpServletRequest.class);
    RequestDetails theDetails = Mockito.mock(RequestDetails.class);

@@ -240,9 +239,11 @@ public class MdmSearchExpandingInterceptorIT extends BaseMdmR4Test {
    // test
    myStorageSettings.setAllowMdmExpansion(true);
    IFhirResourceDaoPatient<Patient> dao = (IFhirResourceDaoPatient<Patient>) myPatientDao;
    final PatientEverythingParameters queryParams = new PatientEverythingParameters();
    queryParams.setMdmExpand(true);
    IBundleProvider outcome = dao.patientInstanceEverything(
      req,
      theDetails, new PatientEverythingParameters(), new IdDt(id)
      theDetails, queryParams, new IdDt(id)
    );

    // verify return results

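The hunk above switches the $everything call from a default PatientEverythingParameters to one with MDM expansion enabled. A minimal sketch of the assumed call shape; the dao, request, and details variables are illustrative stand-ins for the objects set up in the test:

  // Enable MDM expansion for a patient-instance $everything call
  PatientEverythingParameters everythingParams = new PatientEverythingParameters();
  everythingParams.setMdmExpand(true);
  IBundleProvider result = dao.patientInstanceEverything(req, theDetails, everythingParams, new IdDt("Patient/123"));
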
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@@ -86,7 +86,7 @@
    </dependency>
    <dependency>
      <groupId>org.hibernate.search</groupId>
      <artifactId>hibernate-search-mapper-orm-orm6</artifactId>
      <artifactId>hibernate-search-mapper-orm</artifactId>
    </dependency>
    <dependency>
      <groupId>org.hibernate.search</groupId>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@@ -125,7 +125,7 @@
    </dependency>
    <dependency>
      <groupId>org.hibernate.search</groupId>
      <artifactId>hibernate-search-mapper-orm-orm6</artifactId>
      <artifactId>hibernate-search-mapper-orm</artifactId>
    </dependency>
    <dependency>
      <groupId>org.jscience</groupId>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -38,6 +38,7 @@ public class SynchronousSearchSvcImplTest extends BaseSearchSvc {
  @BeforeEach
  public void before() {
    mySynchronousSearchSvc.setContext(ourCtx);
    mySynchronousSearchSvc.mySearchBuilderFactory = mySearchBuilderFactory;
  }

  @Test

@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -12,16 +12,12 @@ import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.test.utilities.HttpClientExtension;

@@ -95,6 +91,7 @@ import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

@TestMethodOrder(MethodOrderer.OrderAnnotation.class)

@@ -409,6 +406,72 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
    verifyBulkExportResults(options, List.of("Patient/P1", obsId, encId), List.of("Patient/P2", obsId2, encId2, obsId3));
  }

  @Test
  public void testBulkExportParametersPersistExtraData() {
    // setup
    myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.ENABLED);
    List<String> ids = new ArrayList<>();

    // create some resources
    Patient patient = new Patient();
    patient.setId("P1");
    patient.setActive(true);
    myClient.update().resource(patient).execute();
    ids.add("Patient/P1");

    Observation observation;
    for (int i = 0; i < 5; i++) {
      observation = new Observation();
      observation.setSubject(new Reference().setReference("Patient/P1"));
      observation.setStatus(Observation.ObservationStatus.PRELIMINARY);
      String obsId = myClient.create().resource(observation).execute().getId().toUnqualifiedVersionless().getValue();
      ids.add(obsId);
    }

    Encounter encounter = new Encounter();
    encounter.setSubject(new Reference().setReference("Patient/P1"));
    encounter.setStatus(Encounter.EncounterStatus.INPROGRESS);
    String encId = myClient.create().resource(encounter).execute().getId().toUnqualifiedVersionless().getValue();
    ids.add(encId);

    // set the export options
    BulkExportJobParameters options = new BulkExportJobParameters();
    options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Encounter"));
    options.setPatientIds(Set.of("Patient/P1"));
    options.setFilters(new HashSet<>());
    options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
    options.setOutputFormat(Constants.CT_FHIR_NDJSON);

    String key = "counter";
    String value = "value_";
    options.setUserData(key, value);

    List<String> valueSet = new ArrayList<>();
    Object interceptor = new Object() {
      @Hook(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)
      public void onExpandResources(IBaseResource theBaseResource, BulkExportJobParameters theParams) {
        // this will be called once for every resource
        String value = (String) theParams.getUserData().get(key);
        valueSet.add(value + theBaseResource.getIdElement().toUnqualifiedVersionless().getValue());
      }
    };
    myInterceptorRegistry.registerInterceptor(interceptor);

    try {
      verifyBulkExportResults(options, ids, new ArrayList<>());

      assertFalse(valueSet.isEmpty());
      assertEquals(ids.size(), valueSet.size());
      for (String id : valueSet) {
        // should start with our value from the key-value pairs
        assertTrue(id.startsWith(value));
        assertTrue(ids.contains(id.substring(value.length())));
      }
    } finally {
      myInterceptorRegistry.unregisterInterceptor(interceptor);
    }
  }

  @Test
  public void testPatientBulkExportWithMultiIds() {
    myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.ENABLED);

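The new test exercises the userData map on the bulk export job parameters: values set before the job starts are visible again inside the export inclusion hook. A compact sketch of that round trip, with everything outside setUserData/getUserData illustrative:

  // Stash a value on the job parameters before starting the export
  BulkExportJobParameters params = new BulkExportJobParameters();
  params.setUserData("counter", "value_");
  // ... later, inside a STORAGE_BULK_EXPORT_RESOURCE_INCLUSION hook, read it back:
  String prefix = (String) params.getUserData().get("counter"); // "value_"
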
@@ -6,6 +6,7 @@ import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;

@@ -107,6 +108,8 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
  private IBatch2JobInstanceRepository myJobInstanceRepository;
  @Autowired
  private IBatch2WorkChunkRepository myWorkChunkRepository;
  @Autowired
  private IInterceptorService myInterceptorService;

  @BeforeEach
  public void beforeEach() {

@@ -557,8 +560,8 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {

    RequestDetails details = new SystemRequestDetails();
    List<String> patientIds = new ArrayList<>();
    for(int i = 0; i < numPatients; i++){
      String id = "p-"+i;
    for (int i = 0; i < numPatients; i++) {
      String id = "p-" + i;
      Patient patient = new Patient();
      patient.setId(id);
      myPatientDao.update(patient, details);

@@ -594,7 +597,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
    List<String> binaryUrls = results.getResourceTypeToBinaryIds().get("Patient");

    IParser jsonParser = myFhirContext.newJsonParser();
    for(String url : binaryUrls){
    for (String url : binaryUrls) {
      Binary binary = myClient.read().resource(Binary.class).withUrl(url).execute();
      assertEquals(Constants.CT_FHIR_NDJSON, binary.getContentType());
      String resourceContents = new String(binary.getContent(), Constants.CHARSET_UTF8);

@@ -2,11 +2,11 @@ package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;

@@ -18,20 +18,28 @@ import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Condition;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.ExplanationOfBenefit;
import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.MessageHeader;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.Task;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.platform.commons.annotation.Testable;

import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;

@@ -39,6 +47,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import static ca.uhn.fhir.util.HapiExtensions.EXTENSION_AUTO_VERSION_REFERENCES_AT_PATH;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

@@ -60,11 +69,60 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
    myStorageSettings.setAutoVersionReferenceAtPaths(new JpaStorageSettings().getAutoVersionReferenceAtPaths());
  }

  @Test
  public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferenceToUpsertWithNop() {
    myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
    myStorageSettings.setAutoVersionReferenceAtPaths("ExplanationOfBenefit.patient");
  @Nested
  public class AutoVersionReferencesWithSettingAndExtension extends AutoVersionReferencesWithExtension {
    @BeforeEach
    public void before() {
      beforeAutoVersionReferencesWithSetting();
    }
  }

  @Nested
  public class AutoVersionReferencesWithSetting extends AutoVersionReferencesTestCases {
    @BeforeEach
    public void before() {
      beforeAutoVersionReferencesWithSetting();
    }
  }

  private void beforeAutoVersionReferencesWithSetting() {
    myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
    myStorageSettings.setAutoVersionReferenceAtPaths(
      "Patient.managingOrganization",
      "ExplanationOfBenefit.patient",
      "Observation.subject",
      "MessageHeader.focus"
    );
  }

  @Nested
  public class AutoVersionReferencesWithExtension extends AutoVersionReferencesTestCases {
    @BeforeEach
    public void before() {
      patientAutoVersionExtension = createAutoVersionReferencesExtensions("managingOrganization");
      observationAutoVersionExtension = createAutoVersionReferencesExtensions("subject");
      explanationOfBenefitAutoVersionExtension = createAutoVersionReferencesExtensions("patient");
      messageHeaderAutoVersionExtension = createAutoVersionReferencesExtensions("focus");
    }

    @NotNull
    private List<Extension> createAutoVersionReferencesExtensions(String... thePaths) {
      return Arrays.stream(thePaths)
        .map(path -> new Extension(EXTENSION_AUTO_VERSION_REFERENCES_AT_PATH, new StringType(path)))
        .collect(Collectors.toList());
    }
  }
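The nested classes above run one shared suite of test cases under two configurations: a global storage setting versus a per-resource meta extension. A minimal sketch of the extension variant, using the EXTENSION_AUTO_VERSION_REFERENCES_AT_PATH constant imported in this diff; the surrounding resource setup is illustrative:

  // Ask the server to auto-version references found at Observation.subject
  Observation obs = new Observation();
  obs.getMeta().addExtension(
    new Extension(EXTENSION_AUTO_VERSION_REFERENCES_AT_PATH, new StringType("subject")));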

  @Testable
  public abstract class AutoVersionReferencesTestCases {

    protected List<Extension> patientAutoVersionExtension = Collections.emptyList();
    protected List<Extension> observationAutoVersionExtension = Collections.emptyList();
    protected List<Extension> explanationOfBenefitAutoVersionExtension = Collections.emptyList();
    protected List<Extension> messageHeaderAutoVersionExtension = Collections.emptyList();

    @Test
    public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferenceToUpsertWithNoOp() {
      // We'll submit the same bundle twice. It has an UPSERT (with no changes
      // the second time) on a Patient, and a CREATE on an ExplanationOfBenefit
      // referencing that Patient.

@@ -77,6 +135,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
      bb.addTransactionUpdateEntry(patient);

      ExplanationOfBenefit eob = new ExplanationOfBenefit();
      eob.getMeta().setExtension(explanationOfBenefitAutoVersionExtension);
      eob.setId(IdType.newRandomUuid());
      eob.setPatient(new Reference("Patient/A"));
      bb.addTransactionCreateEntry(eob);

@@ -104,13 +163,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
    }

    @Test
    public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferenceToVersionedReferenceToUpsertWithNop() {
      myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
      myStorageSettings.setAutoVersionReferenceAtPaths(
        "Patient.managingOrganization",
        "ExplanationOfBenefit.patient"
      );

    public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferenceToVersionedReferenceToUpsertWithNoOp() {
      // We'll submit the same bundle twice. It has an UPSERT (with no changes
      // the second time) on a Patient, and a CREATE on an ExplanationOfBenefit
      // referencing that Patient.

@@ -123,12 +176,14 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
      bb.addTransactionUpdateEntry(organization);

      Patient patient = new Patient();
      patient.getMeta().setExtension(patientAutoVersionExtension);
      patient.setId("Patient/A");
      patient.setManagingOrganization(new Reference("Organization/O"));
      patient.setActive(true);
      bb.addTransactionUpdateEntry(patient);

      ExplanationOfBenefit eob = new ExplanationOfBenefit();
      eob.getMeta().setExtension(explanationOfBenefitAutoVersionExtension);
      eob.setId(IdType.newRandomUuid());
      eob.setPatient(new Reference("Patient/A"));
      bb.addTransactionCreateEntry(eob);

@@ -165,12 +220,6 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {

    @Test
    public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferenceToVersionedReferenceToUpsertWithChange() {
      myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
      myStorageSettings.setAutoVersionReferenceAtPaths(
        "Patient.managingOrganization",
        "ExplanationOfBenefit.patient"
      );

      AtomicInteger counter = new AtomicInteger();
      Supplier<Bundle> supplier = () -> {
        BundleBuilder bb = new BundleBuilder(myFhirContext);

@@ -182,12 +231,14 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
        bb.addTransactionUpdateEntry(organization);

        Patient patient = new Patient();
        patient.getMeta().setExtension(patientAutoVersionExtension);
        patient.setId("Patient/A");
        patient.setManagingOrganization(new Reference("Organization/O"));
        patient.setActive(true);
        bb.addTransactionUpdateEntry(patient);

        ExplanationOfBenefit eob = new ExplanationOfBenefit();
        eob.getMeta().setExtension(explanationOfBenefitAutoVersionExtension);
        eob.setId(IdType.newRandomUuid());
        eob.setPatient(new Reference("Patient/A"));
        bb.addTransactionCreateEntry(eob);

@@ -220,6 +271,325 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
      assertEquals(patientId, eob2.getPatient().getReference());
    }

    @Test
    public void testInsertVersionedReferenceAtPath() {
      Patient p = new Patient();
      p.setActive(true);
      IIdType patientId = myPatientDao.create(p).getId().toUnqualified();
      assertEquals("1", patientId.getVersionIdPart());
      assertEquals(null, patientId.getBaseUrl());
      String patientIdString = patientId.getValue();

      // Create - put an unversioned reference in the subject
      Observation observation = new Observation();
      observation.getMeta().setExtension(observationAutoVersionExtension);
      observation.getSubject().setReference(patientId.toVersionless().getValue());
      IIdType observationId = myObservationDao.create(observation).getId().toUnqualified();

      // Read back and verify that reference is now versioned
      observation = myObservationDao.read(observationId);
      assertEquals(patientIdString, observation.getSubject().getReference());

      myCaptureQueriesListener.clear();

      // Update - put an unversioned reference in the subject
      observation = new Observation();
      observation.getMeta().setExtension(observationAutoVersionExtension);
      observation.setId(observationId);
      observation.addIdentifier().setSystem("http://foo").setValue("bar");
      observation.getSubject().setReference(patientId.toVersionless().getValue());
      myObservationDao.update(observation);

      // Make sure we're not introducing any extra DB operations
      assertEquals(5, myCaptureQueriesListener.logSelectQueries().size());

      // Read back and verify that reference is now versioned
      observation = myObservationDao.read(observationId);
      assertEquals(patientIdString, observation.getSubject().getReference());
    }

    @Test
    public void testInsertVersionedReferenceAtPath_InTransaction_SourceAndTargetBothCreated() {
      BundleBuilder builder = new BundleBuilder(myFhirContext);

      Patient patient = new Patient();
      patient.setId(IdType.newRandomUuid());
      patient.setActive(true);
      builder.addTransactionCreateEntry(patient);

      Encounter encounter = new Encounter();
      encounter.setId(IdType.newRandomUuid());
      encounter.addIdentifier().setSystem("http://baz").setValue("baz");
      builder.addTransactionCreateEntry(encounter);

      Observation observation = new Observation();
      observation.getMeta().setExtension(observationAutoVersionExtension);
      observation.getSubject().setReference(patient.getId()); // versioned
      observation.getEncounter().setReference(encounter.getId()); // not versioned
      builder.addTransactionCreateEntry(observation);

      Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
      ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
      IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
      IdType encounterId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
      IdType observationId = new IdType(outcome.getEntry().get(2).getResponse().getLocation());
      assertTrue(patientId.hasVersionIdPart());
      assertTrue(encounterId.hasVersionIdPart());
      assertTrue(observationId.hasVersionIdPart());

      // Read back and verify that reference is now versioned
      observation = myObservationDao.read(observationId);
      assertEquals(patientId.getValue(), observation.getSubject().getReference());
      assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference());
    }

    @Test
    public void testInsertVersionedReferenceAtPath_InTransaction_TargetConditionalCreatedNoOp() {
      {
        // Create patient
        createAndUpdatePatient(IdType.newRandomUuid().getId());

        // Create encounter
        Encounter encounter = new Encounter();
        encounter.setId(IdType.newRandomUuid());
        encounter.addIdentifier().setSystem("http://baz").setValue("baz");
        myEncounterDao.create(encounter);
      }

      // Verify Patient Version
      assertEquals("2", myPatientDao.search(SearchParameterMap.newSynchronous("active", new TokenParam("false")))
        .getResources(0, 1).get(0).getIdElement().getVersionIdPart());

      BundleBuilder builder = new BundleBuilder(myFhirContext);

      Patient patient = new Patient();
      patient.setId(IdType.newRandomUuid());
      patient.setActive(true);
      builder.addTransactionCreateEntry(patient).conditional("Patient?active=false");

      Encounter encounter = new Encounter();
      encounter.setId(IdType.newRandomUuid());
      encounter.addIdentifier().setSystem("http://baz").setValue("baz");
      builder.addTransactionCreateEntry(encounter).conditional("Encounter?identifier=http://baz|baz");

      Observation observation = new Observation();
      observation.getMeta().setExtension(observationAutoVersionExtension);
      observation.getSubject().setReference(patient.getId()); // versioned
      observation.getEncounter().setReference(encounter.getId()); // not versioned
      builder.addTransactionCreateEntry(observation);

      Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
      ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
      assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
      assertEquals("200 OK", outcome.getEntry().get(1).getResponse().getStatus());
      assertEquals("201 Created", outcome.getEntry().get(2).getResponse().getStatus());
      IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
      IdType encounterId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
      IdType observationId = new IdType(outcome.getEntry().get(2).getResponse().getLocation());
      assertEquals("2", patientId.getVersionIdPart());
      assertEquals("1", encounterId.getVersionIdPart());
      assertEquals("1", observationId.getVersionIdPart());

      // Read back and verify that reference is now versioned
      observation = myObservationDao.read(observationId);
      assertEquals(patientId.getValue(), observation.getSubject().getReference());
      assertEquals("2", observation.getSubject().getReferenceElement().getVersionIdPart());
      assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference());
    }

    @Test
    public void testInsertVersionedReferenceAtPath_InTransaction_TargetUpdate() {
      myStorageSettings.setDeleteEnabled(false);

      {
        // Create patient
        Patient patient = new Patient();
        patient.setId("PATIENT");
        patient.setActive(true);
        myPatientDao.update(patient).getId();

        // Update patient to make a second version
        patient.setActive(false);
        myPatientDao.update(patient);
      }

      BundleBuilder builder = new BundleBuilder(myFhirContext);

      Patient patient = new Patient();
      patient.setId("Patient/PATIENT");
      patient.setActive(true);
      builder.addTransactionUpdateEntry(patient);

      Observation observation = new Observation();
      observation.getMeta().setExtension(observationAutoVersionExtension);
      observation.getSubject().setReference(patient.getId()); // versioned
      builder.addTransactionCreateEntry(observation);

      myCaptureQueriesListener.clear();
      Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
      ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
      assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
      assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
      IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
      IdType observationId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
      assertEquals("3", patientId.getVersionIdPart());
      assertEquals("1", observationId.getVersionIdPart());

      // Make sure we're not introducing any extra DB operations
      assertEquals(3, myCaptureQueriesListener.logSelectQueries().size());

      // Read back and verify that reference is now versioned
      observation = myObservationDao.read(observationId);
      assertEquals(patientId.getValue(), observation.getSubject().getReference());
    }

    @Test
    public void testInsertVersionedReferenceAtPath_InTransaction_TargetUpdateConditional() {
      createAndUpdatePatient(IdType.newRandomUuid().getId());

      BundleBuilder builder = new BundleBuilder(myFhirContext);

      Patient patient = new Patient();
      patient.setId(IdType.newRandomUuid());
      patient.setDeceased(new BooleanType(true));
      patient.setActive(false);
      builder
        .addTransactionUpdateEntry(patient)
        .conditional("Patient?active=false");

      Observation observation = new Observation();
      observation.getMeta().setExtension(observationAutoVersionExtension);
      observation.getSubject().setReference(patient.getId()); // versioned
      builder.addTransactionCreateEntry(observation);

      myCaptureQueriesListener.clear();

      Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
      ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
      assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
      assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
      IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
      IdType observationId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
      assertEquals("3", patientId.getVersionIdPart());
      assertEquals("1", observationId.getVersionIdPart());

      // Make sure we're not introducing any extra DB operations
      assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());

      // Read back and verify that reference is now versioned
      observation = myObservationDao.read(observationId);
      assertEquals(patientId.getValue(), observation.getSubject().getReference());
    }

    @Test
    @DisplayName("Bundle transaction with AutoVersionReferenceAtPath on and with existing Patient resource should create")
    public void bundleTransaction_autoVersionReferenceAtPathWithPreexistingPatientReference_shouldCreate() {
      String patientId = "Patient/RED";
      IIdType idType = new IdDt(patientId);

      // create patient ahead of time
      Patient patient = new Patient();
      patient.setId(patientId);
      DaoMethodOutcome outcome = myPatientDao.update(patient);
      assertThat(outcome.getResource().getIdElement().getValue(), is(equalTo(patientId + "/_history/1")));

      Patient returned = myPatientDao.read(idType);
      Assertions.assertNotNull(returned);
      assertThat(returned.getId(), is(equalTo(patientId + "/_history/1")));

      // update to change version
      patient.setActive(true);
      myPatientDao.update(patient);

      Observation obs = new Observation();
      obs.getMeta().setExtension(observationAutoVersionExtension);
      obs.setId("Observation/DEF");
      Reference patientRef = new Reference(patientId);
      obs.setSubject(patientRef);
      BundleBuilder builder = new BundleBuilder(myFhirContext);
      builder.addTransactionUpdateEntry(obs);

      Bundle submitted = (Bundle) builder.getBundle();

      Bundle returnedTr = mySystemDao.transaction(new SystemRequestDetails(), submitted);

      Assertions.assertNotNull(returnedTr);

      // some verification
      Observation obRet = myObservationDao.read(obs.getIdElement());
      Assertions.assertNotNull(obRet);
    }

    @Test
    @DisplayName("GH-2901 Test no NPE is thrown on autoversioned references")
    public void testNoNpeMinimal() {
      myStorageSettings.setAutoCreatePlaceholderReferenceTargets(true);

      Observation obs = new Observation();
      obs.getMeta().setExtension(observationAutoVersionExtension);
      obs.setId("Observation/DEF");
      Reference patientRef = new Reference("Patient/RED");
      obs.setSubject(patientRef);
      BundleBuilder builder = new BundleBuilder(myFhirContext);
      builder.addTransactionUpdateEntry(obs);

      Bundle submitted = (Bundle) builder.getBundle();

      Bundle returnedTr = mySystemDao.transaction(new SystemRequestDetails(), submitted);

      Assertions.assertNotNull(returnedTr);

      // some verification
      Observation obRet = myObservationDao.read(obs.getIdElement());
      Assertions.assertNotNull(obRet);
      Patient returned = myPatientDao.read(patientRef.getReferenceElement());
      Assertions.assertNotNull(returned);
    }

    @Test
    public void testInsertVersionedReferencesByPath_resourceReferenceNotInTransaction_addsVersionToTheReferences() {
      Patient patient = createAndUpdatePatient(IdType.newRandomUuid().getId());

      // create MessageHeader
      MessageHeader messageHeader = new MessageHeader();
      messageHeader.getMeta().setExtension(messageHeaderAutoVersionExtension);
      // add reference
      messageHeader.addFocus().setReference(patient.getIdElement().toVersionless().getValue());

      BundleBuilder builder = new BundleBuilder(myFhirContext);
      builder.addTransactionCreateEntry(messageHeader);

      ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(builder.getBundle()));
      Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
      ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
      assertEquals("201 Created", outcome.getEntry().get(0).getResponse().getStatus());

      IdType messageHeaderId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
      assertEquals("2", patient.getIdElement().getVersionIdPart());
      assertEquals("1", messageHeaderId.getVersionIdPart());

      // read back and verify that reference is versioned
      messageHeader = myMessageHeaderDao.read(messageHeaderId);
      assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference());
    }

    private Patient createAndUpdatePatient(String thePatientId) {
      Patient patient = new Patient();
      patient.setId(thePatientId);
      patient.setActive(true);
      myPatientDao.create(patient).getId();

      // update patient to make a second version
      patient.setActive(false);
      myPatientDao.update(patient);
      return patient;
    }
  }

  @Test
  public void testStoreAndRetrieveVersionedReference() {
    myFhirContext.getParserOptions().setStripVersionsFromReferences(false);

@@ -264,243 +634,6 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
    assertEquals(patientId.withVersion("1").getValue(), observation.getSubject().getReference());
  }

  @Test
  public void testInsertVersionedReferenceAtPath() {
    myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
    myStorageSettings.setAutoVersionReferenceAtPaths("Observation.subject");

    Patient p = new Patient();
    p.setActive(true);
    IIdType patientId = myPatientDao.create(p).getId().toUnqualified();
    assertEquals("1", patientId.getVersionIdPart());
    assertEquals(null, patientId.getBaseUrl());
    String patientIdString = patientId.getValue();

    // Create - put an unversioned reference in the subject
    Observation observation = new Observation();
    observation.getSubject().setReference(patientId.toVersionless().getValue());
    IIdType observationId = myObservationDao.create(observation).getId().toUnqualified();

    // Read back and verify that reference is now versioned
    observation = myObservationDao.read(observationId);
    assertEquals(patientIdString, observation.getSubject().getReference());

    myCaptureQueriesListener.clear();

    // Update - put an unversioned reference in the subject
    observation = new Observation();
    observation.setId(observationId);
    observation.addIdentifier().setSystem("http://foo").setValue("bar");
    observation.getSubject().setReference(patientId.toVersionless().getValue());
    myObservationDao.update(observation);

    // Make sure we're not introducing any extra DB operations
    assertEquals(5, myCaptureQueriesListener.logSelectQueries().size());

    // Read back and verify that reference is now versioned
    observation = myObservationDao.read(observationId);
    assertEquals(patientIdString, observation.getSubject().getReference());
  }

  @Test
  public void testInsertVersionedReferenceAtPath_InTransaction_SourceAndTargetBothCreated() {
    myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
    myStorageSettings.setAutoVersionReferenceAtPaths("Observation.subject");

    BundleBuilder builder = new BundleBuilder(myFhirContext);

    Patient patient = new Patient();
    patient.setId(IdType.newRandomUuid());
    patient.setActive(true);
    builder.addTransactionCreateEntry(patient);

    Encounter encounter = new Encounter();
    encounter.setId(IdType.newRandomUuid());
    encounter.addIdentifier().setSystem("http://baz").setValue("baz");
    builder.addTransactionCreateEntry(encounter);

    Observation observation = new Observation();
    observation.getSubject().setReference(patient.getId()); // versioned
    observation.getEncounter().setReference(encounter.getId()); // not versioned
    builder.addTransactionCreateEntry(observation);

    Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
    ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
    IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
    IdType encounterId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
    IdType observationId = new IdType(outcome.getEntry().get(2).getResponse().getLocation());
    assertTrue(patientId.hasVersionIdPart());
    assertTrue(encounterId.hasVersionIdPart());
    assertTrue(observationId.hasVersionIdPart());

    // Read back and verify that reference is now versioned
    observation = myObservationDao.read(observationId);
    assertEquals(patientId.getValue(), observation.getSubject().getReference());
    assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference());
  }

  @Test
  public void testInsertVersionedReferenceAtPath_InTransaction_TargetConditionalCreatedNop() {
    myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
    myStorageSettings.setAutoVersionReferenceAtPaths("Observation.subject");

    {
      // Create patient
      Patient patient = new Patient();
      patient.setId(IdType.newRandomUuid());
      patient.setActive(true);
      myPatientDao.create(patient).getId();

      // Update patient to make a second version
      patient.setActive(false);
      myPatientDao.update(patient);

      // Create encounter
      Encounter encounter = new Encounter();
      encounter.setId(IdType.newRandomUuid());
      encounter.addIdentifier().setSystem("http://baz").setValue("baz");
      myEncounterDao.create(encounter);
    }

    // Verify Patient Version
    assertEquals("2", myPatientDao.search(SearchParameterMap.newSynchronous("active", new TokenParam("false"))).getResources(0, 1).get(0).getIdElement().getVersionIdPart());

    BundleBuilder builder = new BundleBuilder(myFhirContext);

    Patient patient = new Patient();
    patient.setId(IdType.newRandomUuid());
    patient.setActive(true);
    builder.addTransactionCreateEntry(patient).conditional("Patient?active=false");

    Encounter encounter = new Encounter();
    encounter.setId(IdType.newRandomUuid());
    encounter.addIdentifier().setSystem("http://baz").setValue("baz");
    builder.addTransactionCreateEntry(encounter).conditional("Encounter?identifier=http://baz|baz");

    Observation observation = new Observation();
    observation.getSubject().setReference(patient.getId()); // versioned
    observation.getEncounter().setReference(encounter.getId()); // not versioned
    builder.addTransactionCreateEntry(observation);

    Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
    ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
    assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
    assertEquals("200 OK", outcome.getEntry().get(1).getResponse().getStatus());
    assertEquals("201 Created", outcome.getEntry().get(2).getResponse().getStatus());
    IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
    IdType encounterId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
    IdType observationId = new IdType(outcome.getEntry().get(2).getResponse().getLocation());
    assertEquals("2", patientId.getVersionIdPart());
    assertEquals("1", encounterId.getVersionIdPart());
    assertEquals("1", observationId.getVersionIdPart());

    // Read back and verify that reference is now versioned
    observation = myObservationDao.read(observationId);
    assertEquals(patientId.getValue(), observation.getSubject().getReference());
    assertEquals("2", observation.getSubject().getReferenceElement().getVersionIdPart());
    assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference());
  }

  @Test
  public void testInsertVersionedReferenceAtPath_InTransaction_TargetUpdate() {
    myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
    myStorageSettings.setDeleteEnabled(false);
    myStorageSettings.setAutoVersionReferenceAtPaths("Observation.subject");

    {
      // Create patient
      Patient patient = new Patient();
      patient.setId("PATIENT");
      patient.setActive(true);
      myPatientDao.update(patient).getId();

      // Update patient to make a second version
      patient.setActive(false);
      myPatientDao.update(patient);
    }

    BundleBuilder builder = new BundleBuilder(myFhirContext);

    Patient patient = new Patient();
    patient.setId("Patient/PATIENT");
    patient.setActive(true);
    builder.addTransactionUpdateEntry(patient);

    Observation observation = new Observation();
    observation.getSubject().setReference(patient.getId()); // versioned
    builder.addTransactionCreateEntry(observation);

    myCaptureQueriesListener.clear();
    Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
    ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
    assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
    assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
    IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
    IdType observationId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
    assertEquals("3", patientId.getVersionIdPart());
    assertEquals("1", observationId.getVersionIdPart());

    // Make sure we're not introducing any extra DB operations
    assertEquals(3, myCaptureQueriesListener.logSelectQueries().size());

    // Read back and verify that reference is now versioned
    observation = myObservationDao.read(observationId);
    assertEquals(patientId.getValue(), observation.getSubject().getReference());
  }

  @Test
  public void testInsertVersionedReferenceAtPath_InTransaction_TargetUpdateConditional() {
    myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
    myStorageSettings.setAutoVersionReferenceAtPaths("Observation.subject");

    {
      // Create patient
      Patient patient = new Patient();
      patient.setId(IdType.newRandomUuid());
      patient.setActive(true);
      myPatientDao.create(patient).getId();

      // Update patient to make a second version
      patient.setActive(false);
      myPatientDao.update(patient);
    }

    BundleBuilder builder = new BundleBuilder(myFhirContext);

    Patient patient = new Patient();
    patient.setId(IdType.newRandomUuid());
    patient.setDeceased(new BooleanType(true));
    patient.setActive(false);
    builder
      .addTransactionUpdateEntry(patient)
      .conditional("Patient?active=false");

    Observation observation = new Observation();
    observation.getSubject().setReference(patient.getId()); // versioned
    builder.addTransactionCreateEntry(observation);

    myCaptureQueriesListener.clear();

    Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
    ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
    assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
    assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
    IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
    IdType observationId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
    assertEquals("3", patientId.getVersionIdPart());
    assertEquals("1", observationId.getVersionIdPart());

    // Make sure we're not introducing any extra DB operations
    assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());

    // Read back and verify that reference is now versioned
    observation = myObservationDao.read(observationId);
    assertEquals(patientId.getValue(), observation.getSubject().getReference());
  }

  @Test
  public void testSearchAndIncludeVersionedReference_Asynchronous() {
    myFhirContext.getParserOptions().setStripVersionsFromReferences(false);

@@ -866,70 +999,4 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
    Observation obs = myObservationDao.read(idType);
    Assertions.assertNotNull(obs);
  }

  @Test
  @DisplayName("Bundle transaction with AutoVersionReferenceAtPath on and with existing Patient resource should create")
  public void bundleTransaction_autoreferenceAtPathWithPreexistingPatientReference_shouldCreate() {
    myStorageSettings.setAutoVersionReferenceAtPaths("Observation.subject");

    String patientId = "Patient/RED";
    IIdType idType = new IdDt(patientId);

    // create patient ahead of time
    Patient patient = new Patient();
    patient.setId(patientId);
    DaoMethodOutcome outcome = myPatientDao.update(patient);
    assertThat(outcome.getResource().getIdElement().getValue(), is(equalTo(patientId + "/_history/1")));

    Patient returned = myPatientDao.read(idType);
    Assertions.assertNotNull(returned);
    assertThat(returned.getId(), is(equalTo(patientId + "/_history/1")));

    // update to change version
    patient.setActive(true);
    myPatientDao.update(patient);

    Observation obs = new Observation();
    obs.setId("Observation/DEF");
    Reference patientRef = new Reference(patientId);
    obs.setSubject(patientRef);
    BundleBuilder builder = new BundleBuilder(myFhirContext);
    builder.addTransactionUpdateEntry(obs);

    Bundle submitted = (Bundle) builder.getBundle();

    Bundle returnedTr = mySystemDao.transaction(new SystemRequestDetails(), submitted);

    Assertions.assertNotNull(returnedTr);

    // some verification
    Observation obRet = myObservationDao.read(obs.getIdElement());
    Assertions.assertNotNull(obRet);
  }

  @Test
  @DisplayName("GH-2901 Test no NPE is thrown on autoversioned references")
  public void testNoNpeMinimal() {
    myStorageSettings.setAutoCreatePlaceholderReferenceTargets(true);
    myStorageSettings.setAutoVersionReferenceAtPaths("Observation.subject");

    Observation obs = new Observation();
    obs.setId("Observation/DEF");
    Reference patientRef = new Reference("Patient/RED");
    obs.setSubject(patientRef);
    BundleBuilder builder = new BundleBuilder(myFhirContext);
    builder.addTransactionUpdateEntry(obs);

    Bundle submitted = (Bundle) builder.getBundle();

    Bundle returnedTr = mySystemDao.transaction(new SystemRequestDetails(), submitted);

    Assertions.assertNotNull(returnedTr);

    // some verification
    Observation obRet = myObservationDao.read(obs.getIdElement());
    Assertions.assertNotNull(obRet);
    Patient returned = myPatientDao.read(patientRef.getReferenceElement());
    Assertions.assertNotNull(returned);
  }
}

@@ -21,7 +21,11 @@ import org.hl7.fhir.r4.model.Subscription;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentMatchers;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;

@@ -50,6 +54,7 @@ import static org.mockito.Mockito.mock;
/**
 * Test the rest-hook subscriptions
 */
@ExtendWith(MockitoExtension.class)
public class RestHookWithInterceptorR4Test extends BaseSubscriptionsR4Test {

  private static final Logger ourLog = LoggerFactory.getLogger(RestHookWithInterceptorR4Test.class);

@@ -61,6 +66,9 @@ public class RestHookWithInterceptorR4Test extends BaseSubscriptionsR4Test {
  private static boolean ourNextAddHeader;
  private static final FhirContext ourCtx = FhirContext.forR4Cached();

  @Mock(strictness = Mock.Strictness.STRICT_STUBS)
  Logger loggerMock;

  @Autowired
  StoppableSubscriptionDeliveringRestHookSubscriber myStoppableSubscriptionDeliveringRestHookSubscriber;
  @Autowired

@@ -223,14 +231,13 @@ public class RestHookWithInterceptorR4Test extends BaseSubscriptionsR4Test {
  @Test
  public void testDebugLoggingInterceptor() throws Exception {
    List<String> messages = new ArrayList<>();
    Logger loggerMock = mock(Logger.class);
    doAnswer(t -> {
      Object msg = t.getArguments()[0];
      Object[] args = Arrays.copyOfRange(t.getArguments(), 1, t.getArguments().length);
      String formattedMessage = MessageFormatter.arrayFormat((String) msg, args).getMessage();
      messages.add(formattedMessage);
      return null;
    }).when(loggerMock).debug(any(), ArgumentMatchers.<Object[]>any());
    }).when(loggerMock).debug(any(), any(Object[].class));

    SubscriptionDebugLogInterceptor interceptor = new SubscriptionDebugLogInterceptor();
    myInterceptorRegistry.registerInterceptor(interceptor);

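The doAnswer stub above rebuilds the final log string the way SLF4J would render it, via MessageFormatter; the matcher change to any(Object[].class) is presumably to keep the varargs stub matching under the stricter mock now declared as a field. A small self-contained sketch of the formatting step alone:

  // Reconstruct an SLF4J parameterized message; names and values are illustrative
  String rendered = MessageFormatter
    .arrayFormat("delivering {} to {}", new Object[]{"Observation/1", "endpoint-a"})
    .getMessage(); // "delivering Observation/1 to endpoint-a"
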
@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>6.11.8-SNAPSHOT</version>
    <version>6.11.10-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.11.8-SNAPSHOT</version>
|
||||
<version>6.11.10-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.11.8-SNAPSHOT</version>
|
||||
<version>6.11.10-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
@ -85,6 +85,10 @@
|
|||
<groupId>org.apache.derby</groupId>
|
||||
<artifactId>derby</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.derby</groupId>
|
||||
<artifactId>derbytools</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-test</artifactId>
|
||||
|
|
|
@@ -157,6 +157,7 @@ import org.hl7.fhir.r4.model.Media;
import org.hl7.fhir.r4.model.Medication;
import org.hl7.fhir.r4.model.MedicationAdministration;
import org.hl7.fhir.r4.model.MedicationRequest;
import org.hl7.fhir.r4.model.MessageHeader;
import org.hl7.fhir.r4.model.Meta;
import org.hl7.fhir.r4.model.MolecularSequence;
import org.hl7.fhir.r4.model.NamingSystem;

@@ -433,6 +434,9 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
@Qualifier("myExplanationOfBenefitDaoR4")
protected IFhirResourceDao<ExplanationOfBenefit> myExplanationOfBenefitDao;
@Autowired
@Qualifier("myMessageHeaderDaoR4")
protected IFhirResourceDao<MessageHeader> myMessageHeaderDao;
@Autowired
protected IResourceTableDao myResourceTableDao;
@Autowired
protected IResourceHistoryTableDao myResourceHistoryTableDao;
@@ -52,12 +52,8 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import org.apache.commons.collections4.ListUtils;
import org.apache.lucene.search.BooleanQuery;
import org.hibernate.search.engine.search.predicate.dsl.PredicateFinalStep;
import org.hibernate.search.engine.search.predicate.dsl.SearchPredicateFactory;
import org.hibernate.search.engine.search.query.SearchQuery;
import org.hibernate.search.mapper.orm.Search;
import org.apache.lucene.search.IndexSearcher;
import org.hibernate.search.mapper.orm.common.EntityReference;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ValueSet;

@@ -72,7 +68,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.test.util.AopTestUtils;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Transactional;

@@ -80,7 +75,6 @@ import jakarta.persistence.EntityManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

@@ -1944,7 +1938,7 @@ public abstract class BaseValueSetHSearchExpansionR4Test extends BaseJpaTest {

@Test
public void testShouldNotFindAny() {
List<EntityReference> hits = search(allCodesNotIncludingSearched);
List<String> hits = search(allCodesNotIncludingSearched);
assertNotNull(hits);
assertTrue(hits.isEmpty());
}

@@ -1952,12 +1946,12 @@ public abstract class BaseValueSetHSearchExpansionR4Test extends BaseJpaTest {

@Test
public void testHitsInFirstSublist() {
int insertIndex = BooleanQuery.getMaxClauseCount() / 2;
int insertIndex = IndexSearcher.getMaxClauseCount() / 2;

// insert existing codes into list of codes searched
allCodesNotIncludingSearched.addAll(insertIndex, existingCodes);

List<EntityReference> hits = search(allCodesNotIncludingSearched);
List<String> hits = search(allCodesNotIncludingSearched);
assertEquals(existingCodes.size(), hits.size());
}

@@ -1967,7 +1961,7 @@ public abstract class BaseValueSetHSearchExpansionR4Test extends BaseJpaTest {
// insert existing codes into list of codes searched
allCodesNotIncludingSearched.addAll(allCodesNotIncludingSearched.size(), existingCodes);

List<EntityReference> hits = search(allCodesNotIncludingSearched);
List<String> hits = search(allCodesNotIncludingSearched);

assertEquals(existingCodes.size(), hits.size());
}

@@ -1986,42 +1980,20 @@ public abstract class BaseValueSetHSearchExpansionR4Test extends BaseJpaTest {
// insert last partition of existing codes into last sublist of searched codes
allCodesNotIncludingSearched.addAll(allCodesNotIncludingSearched.size(), partitionedExistingCodes.get(1));

List<EntityReference> hits = search(allCodesNotIncludingSearched);
List<String> hits = search(allCodesNotIncludingSearched);
assertEquals(existingCodes.size(), hits.size());
}

private List<EntityReference> search(List<String> theSearchedCodes) {
return runInTransaction(() -> {
TermCodeSystemVersion termCsVersion = myTermCodeSystemVersionDao.findCurrentVersionForCodeSystemResourcePid(termCsId);
Long termCsvPid = termCsVersion.getPid();

SearchSession searchSession = Search.session(myEntityManager);
SearchPredicateFactory predicate = searchSession.scope(TermConcept.class).predicate();

Optional<PredicateFinalStep> lastStepOpt = ReflectionTestUtils.invokeMethod(
new TermReadSvcImpl(), "buildExpansionPredicate", theSearchedCodes, predicate);

assertNotNull(lastStepOpt);
assertTrue(lastStepOpt.isPresent());

PredicateFinalStep step = predicate.bool(b -> {
b.must(predicate.match().field("myCodeSystemVersionPid").matching(termCsvPid));
b.must(lastStepOpt.get());
});

int maxResultsPerBatch = 800;

SearchQuery<EntityReference> termConceptsQuery = searchSession
.search(TermConcept.class)
.selectEntityReference()
.where(f -> step)
.toQuery();

ourLog.trace("About to query: {}", termConceptsQuery.queryString());

return termConceptsQuery.fetchHits(0, maxResultsPerBatch);
});

private List<String> search(List<String> theSearchedCodes) {
// Include
ValueSet vs = new ValueSet();
ValueSet.ConceptSetComponent include = vs.getCompose().addInclude();
include.setSystem(LOINC_URI);
for (var next : theSearchedCodes) {
include.addConcept().setCode(next);
}
ValueSet outcome = myTermSvc.expandValueSet(null, vs);
return toCodesContains(outcome.getExpansion().getContains());
}

}
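
Editor's note: the BooleanQuery-to-IndexSearcher change above tracks a Lucene API move; in recent Lucene releases the engine-wide maximum boolean-clause count is read via IndexSearcher.getMaxClauseCount() rather than BooleanQuery.getMaxClauseCount(). A hedged sketch of why the tests seed hits into a "first" and a "last" sublist: long code lists must be split into clause-safe batches. The helper class and method names below are invented for illustration.

import org.apache.lucene.search.IndexSearcher;

import java.util.ArrayList;
import java.util.List;

class ClauseBatchSketch {
    // Splits a long list of searched codes into sublists that each stay under
    // the engine-wide boolean-clause limit (1024 by default).
    static List<List<String>> partition(List<String> theCodes) {
        int max = IndexSearcher.getMaxClauseCount();
        List<List<String>> batches = new ArrayList<>();
        for (int i = 0; i < theCodes.size(); i += max) {
            batches.add(theCodes.subList(i, Math.min(i + max, theCodes.size())));
        }
        return batches;
    }
}
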
@@ -1,4 +1,4 @@
package ca.uhn.fhir.jpa.dao.index;
package ca.uhn.fhir.jpa.cache;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;

@@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ForcedId;

@@ -14,6 +15,7 @@ import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;

@@ -52,6 +54,11 @@ public class ResourceVersionSvcTest {
@InjectMocks
private ResourceVersionSvcDaoImpl myResourceVersionSvc;

@BeforeEach
public void before() {
myResourceVersionSvc.myIdHelperService = myIdHelperService;
}

/**
* Gets a ResourceTable record for getResourceVersionsForPid
* Order matters!
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -29,7 +29,7 @@ class CdsHooksContextBooterTest {
void validateJsonThrowsExceptionWhenInputIsInvalid() {
// setup
final String expected = "HAPI-2378: Invalid JSON: Unrecognized token 'abc': was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')\n" +
" at [Source: (String)\"abc\"; line: 1, column: 4]";
" at [Source: REDACTED (`StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION` disabled); line: 1, column: 4]";
// execute
final UnprocessableEntityException actual = assertThrows(UnprocessableEntityException.class, () -> myFixture.validateJson("abc"));
// validate

@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.mdm.api;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -27,15 +28,18 @@ import org.hl7.fhir.instance.model.api.IIdType;
import java.util.Set;

public interface IMdmLinkExpandSvc {
Set<String> expandMdmBySourceResource(IBaseResource theResource);
Set<String> expandMdmBySourceResource(RequestPartitionId theRequestPartitionId, IBaseResource theResource);

Set<String> expandMdmBySourceResourceId(IIdType theId);
Set<String> expandMdmBySourceResourceId(RequestPartitionId theRequestPartitionId, IIdType theId);

Set<String> expandMdmBySourceResourcePid(IResourcePersistentId theSourceResourcePid);
Set<String> expandMdmBySourceResourcePid(
RequestPartitionId theRequestPartitionId, IResourcePersistentId<?> theSourceResourcePid);

Set<String> expandMdmByGoldenResourceId(IResourcePersistentId theGoldenResourcePid);
Set<String> expandMdmByGoldenResourceId(
RequestPartitionId theRequestPartitionId, IResourcePersistentId<?> theGoldenResourcePid);

Set<String> expandMdmByGoldenResourcePid(IResourcePersistentId theGoldenResourcePid);
Set<String> expandMdmByGoldenResourcePid(
RequestPartitionId theRequestPartitionId, IResourcePersistentId<?> theGoldenResourcePid);

Set<String> expandMdmByGoldenResourceId(IdDt theId);
Set<String> expandMdmByGoldenResourceId(RequestPartitionId theRequestPartitionId, IdDt theId);
}
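
Editor's note: every expansion entry point in the interface now takes the caller's RequestPartitionId as its first argument. A hedged caller sketch follows; the wrapper class and IDs are invented, only the interface calls come from the diff.

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.mdm.api.IMdmLinkExpandSvc;
import ca.uhn.fhir.model.primitive.IdDt;

import java.util.List;
import java.util.Set;

class ExpandCallerSketch {
    private final IMdmLinkExpandSvc myExpandSvc;

    ExpandCallerSketch(IMdmLinkExpandSvc theExpandSvc) {
        myExpandSvc = theExpandSvc;
    }

    // Expands a golden resource ID, but only through links visible from the given partition
    Set<String> expandForPartition(int thePartitionId, String theGoldenResourceId) {
        RequestPartitionId partition = RequestPartitionId.fromPartitionIds(List.of(thePartitionId));
        return myExpandSvc.expandMdmByGoldenResourceId(partition, new IdDt(theGoldenResourceId));
    }
}
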
@@ -22,12 +22,16 @@ package ca.uhn.fhir.mdm.interceptor;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.mdm.api.IMdmLinkExpandSvc;
import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hl7.fhir.instance.model.api.IIdType;

@@ -52,6 +56,9 @@ public class MdmSearchExpandingInterceptor {

private static final Logger ourLog = Logs.getMdmTroubleshootingLog();

@Autowired
private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;

@Autowired
private IMdmLinkExpandSvc myMdmLinkExpandSvc;

@@ -59,15 +66,20 @@ public class MdmSearchExpandingInterceptor {
private JpaStorageSettings myStorageSettings;

@Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED)
public void hook(SearchParameterMap theSearchParameterMap) {
public void hook(RequestDetails theRequestDetails, SearchParameterMap theSearchParameterMap) {
if (myStorageSettings.isAllowMdmExpansion()) {
final RequestDetails requestDetailsToUse =
theRequestDetails == null ? new SystemRequestDetails() : theRequestDetails;
final RequestPartitionId requestPartitionId =
myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(
requestDetailsToUse, requestDetailsToUse.getResourceName(), theSearchParameterMap, null);
for (Map.Entry<String, List<List<IQueryParameterType>>> set : theSearchParameterMap.entrySet()) {
String paramName = set.getKey();
List<List<IQueryParameterType>> andList = set.getValue();
for (List<IQueryParameterType> orList : andList) {
// here we will know if it's an _id param or not
// from theSearchParameterMap.keySet()
expandAnyReferenceParameters(paramName, orList);
expandAnyReferenceParameters(requestPartitionId, paramName, orList);
}
}
}

@@ -76,7 +88,8 @@ public class MdmSearchExpandingInterceptor {
/**
* If a Parameter is a reference parameter, and it has been set to expand MDM, perform the expansion.
*/
private void expandAnyReferenceParameters(String theParamName, List<IQueryParameterType> orList) {
private void expandAnyReferenceParameters(
RequestPartitionId theRequestPartitionId, String theParamName, List<IQueryParameterType> orList) {
List<IQueryParameterType> toRemove = new ArrayList<>();
List<IQueryParameterType> toAdd = new ArrayList<>();
for (IQueryParameterType iQueryParameterType : orList) {

@@ -85,13 +98,13 @@ public class MdmSearchExpandingInterceptor {
if (refParam.isMdmExpand()) {
ourLog.debug("Found a reference parameter to expand: {}", refParam);
// First, attempt to expand as a source resource.
Set<String> expandedResourceIds =
myMdmLinkExpandSvc.expandMdmBySourceResourceId(new IdDt(refParam.getValue()));
Set<String> expandedResourceIds = myMdmLinkExpandSvc.expandMdmBySourceResourceId(
theRequestPartitionId, new IdDt(refParam.getValue()));

// If we failed, attempt to expand as a golden resource
if (expandedResourceIds.isEmpty()) {
expandedResourceIds =
myMdmLinkExpandSvc.expandMdmByGoldenResourceId(new IdDt(refParam.getValue()));
expandedResourceIds = myMdmLinkExpandSvc.expandMdmByGoldenResourceId(
theRequestPartitionId, new IdDt(refParam.getValue()));
}

// Rebuild the search param list.

@@ -105,7 +118,7 @@ public class MdmSearchExpandingInterceptor {
}
}
} else if (theParamName.equalsIgnoreCase("_id")) {
expandIdParameter(iQueryParameterType, toAdd, toRemove);
expandIdParameter(theRequestPartitionId, iQueryParameterType, toAdd, toRemove);
}
}

@@ -125,11 +138,13 @@ public class MdmSearchExpandingInterceptor {
* Expands out the provided _id parameter into all the various
* ids of linked resources.
*
* @param theRequestPartitionId
* @param theIdParameter
* @param theAddList
* @param theRemoveList
*/
private void expandIdParameter(
RequestPartitionId theRequestPartitionId,
IQueryParameterType theIdParameter,
List<IQueryParameterType> theAddList,
List<IQueryParameterType> theRemoveList) {

@@ -157,10 +172,10 @@ public class MdmSearchExpandingInterceptor {
} else if (mdmExpand) {
ourLog.debug("_id parameter must be expanded out from: {}", id.getValue());

Set<String> expandedResourceIds = myMdmLinkExpandSvc.expandMdmBySourceResourceId(id);
Set<String> expandedResourceIds = myMdmLinkExpandSvc.expandMdmBySourceResourceId(theRequestPartitionId, id);

if (expandedResourceIds.isEmpty()) {
expandedResourceIds = myMdmLinkExpandSvc.expandMdmByGoldenResourceId((IdDt) id);
expandedResourceIds = myMdmLinkExpandSvc.expandMdmByGoldenResourceId(theRequestPartitionId, (IdDt) id);
}

// Rebuild
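
Editor's note, for context rather than from the diff: this interceptor fires at STORAGE_PRESEARCH_REGISTERED, so when allowMdmExpansion is enabled a client opts in per search parameter with the :mdm qualifier, along the lines of

GET [base]/Observation?subject:mdm=Patient/123

The reference is expanded to the MDM-linked source and golden resource IDs before the query executes; with this change, the expansion is additionally limited to records visible from the partition resolved for the request.
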
@@ -20,25 +20,83 @@
package ca.uhn.fhir.mdm.model;

import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import jakarta.annotation.Nullable;

import java.util.Objects;
import java.util.StringJoiner;

public class MdmPidTuple<T extends IResourcePersistentId> {
private final T myGoldenPid;

@Nullable
private final Integer myGoldenPartitionId;

private final T mySourcePid;

private MdmPidTuple(T theGoldenPid, T theSourcePid) {
@Nullable
private final Integer mySourcePartitionId;

private MdmPidTuple(
T theGoldenPid,
@Nullable Integer theGoldenPartitionId,
T theSourcePid,
@Nullable Integer theSourcePartitionId) {
myGoldenPid = theGoldenPid;
mySourcePid = theSourcePid;
myGoldenPartitionId = theGoldenPartitionId;
mySourcePartitionId = theSourcePartitionId;
}

public static <P extends IResourcePersistentId> MdmPidTuple<P> fromGoldenAndSource(P theGoldenPid, P theSourcePid) {
return new MdmPidTuple<>(theGoldenPid, theSourcePid);
return new MdmPidTuple<>(theGoldenPid, null, theSourcePid, null);
}

public static <P extends IResourcePersistentId> MdmPidTuple<P> fromGoldenAndSourceAndPartitionIds(
P theGoldenPid, Integer theGoldenPartitionId, P theSourcePid, Integer theSourcePartitionId) {
return new MdmPidTuple<>(theGoldenPid, theGoldenPartitionId, theSourcePid, theSourcePartitionId);
}

public T getGoldenPid() {
return myGoldenPid;
}

@Nullable
public Integer getGoldenPartitionId() {
return myGoldenPartitionId;
}

public T getSourcePid() {
return mySourcePid;
}

@Nullable
public Integer getSourcePartitionId() {
return mySourcePartitionId;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MdmPidTuple<?> that = (MdmPidTuple<?>) o;
return Objects.equals(myGoldenPid, that.myGoldenPid)
&& Objects.equals(myGoldenPartitionId, that.myGoldenPartitionId)
&& Objects.equals(mySourcePid, that.mySourcePid)
&& Objects.equals(mySourcePartitionId, that.mySourcePartitionId);
}

@Override
public int hashCode() {
return Objects.hash(myGoldenPid, myGoldenPartitionId, mySourcePid, mySourcePartitionId);
}

@Override
public String toString() {
return new StringJoiner(", ", MdmPidTuple.class.getSimpleName() + "[", "]")
.add("myGoldenPid=" + myGoldenPid)
.add("myGoldenPartitionId=" + myGoldenPartitionId)
.add("mySourcePid=" + mySourcePid)
.add("mySourcePartitionId=" + mySourcePartitionId)
.toString();
}
}
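
Editor's note: a quick illustration of the two factory paths above (the PIDs and partition IDs are invented). Tuples built by the legacy factory carry null partition IDs, and equals()/hashCode() now distinguish otherwise-identical tuples by partition.

import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.mdm.model.MdmPidTuple;

class TupleSketch {
    static void demo() {
        // Legacy path: no partition information (both partition IDs are null)
        MdmPidTuple<JpaPid> legacy =
                MdmPidTuple.fromGoldenAndSource(JpaPid.fromId(999L), JpaPid.fromId(123L));

        // Partition-aware path: golden record on partition 3, source record on partition 1
        MdmPidTuple<JpaPid> partitioned =
                MdmPidTuple.fromGoldenAndSourceAndPartitionIds(JpaPid.fromId(999L), 3, JpaPid.fromId(123L), 1);

        // Same PIDs, different partition IDs: the tuples are no longer equal
        assert !legacy.equals(partitioned);
    }
}
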
@@ -36,9 +36,11 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.HashSet;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

@Service
@Transactional

@@ -61,88 +63,114 @@ public class MdmLinkExpandSvc implements IMdmLinkExpandSvc {
* @return A set of strings representing the FHIR IDs of the expanded resources.
*/
@Override
public Set<String> expandMdmBySourceResource(IBaseResource theResource) {
public Set<String> expandMdmBySourceResource(RequestPartitionId theRequestPartitionId, IBaseResource theResource) {
ourLog.debug("About to MDM-expand source resource {}", theResource);
return expandMdmBySourceResourceId(theResource.getIdElement());
return expandMdmBySourceResourceId(theRequestPartitionId, theResource.getIdElement());
}

/**
* Given a resource ID of a source resource, perform MDM expansion and return all the resource IDs of all resources that are
* MDM-Matched to this resource.
*
* @param theRequestPartitionId The partition ID associated with the request.
* @param theId The Resource ID of the resource to MDM-Expand
* @return A set of strings representing the FHIR ids of the expanded resources.
*/
@Override
public Set<String> expandMdmBySourceResourceId(IIdType theId) {
public Set<String> expandMdmBySourceResourceId(RequestPartitionId theRequestPartitionId, IIdType theId) {
ourLog.debug("About to expand source resource with resource id {}", theId);
return expandMdmBySourceResourcePid(
theRequestPartitionId,
myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), theId));
}

/**
* Given a PID of a source resource, perform MDM expansion and return all the resource IDs of all resources that are
* Given a partition ID and a PID of a source resource, perform MDM expansion and return all the resource IDs of all resources that are
* MDM-Matched to this resource.
*
* @param theRequestPartitionId The partition ID associated with the request.
* @param theSourceResourcePid The PID of the resource to MDM-Expand
* @return A set of strings representing the FHIR ids of the expanded resources.
*/
@Override
public Set<String> expandMdmBySourceResourcePid(IResourcePersistentId theSourceResourcePid) {
public Set<String> expandMdmBySourceResourcePid(
RequestPartitionId theRequestPartitionId, IResourcePersistentId<?> theSourceResourcePid) {
ourLog.debug("About to expand source resource with PID {}", theSourceResourcePid);
List<MdmPidTuple> goldenPidSourcePidTuples =
final List<MdmPidTuple<?>> goldenPidSourcePidTuples =
myMdmLinkDao.expandPidsBySourcePidAndMatchResult(theSourceResourcePid, MdmMatchResultEnum.MATCH);
return flattenPidTuplesToSet(theSourceResourcePid, goldenPidSourcePidTuples);

return flattenPidTuplesToSet(theRequestPartitionId, theSourceResourcePid, goldenPidSourcePidTuples);
}

/**
* Given a PID of a golden resource, perform MDM expansion and return all the resource IDs of all resources that are
* MDM-Matched to this golden resource.
*
* @param theRequestPartitionId Partition information from the request
* @param theGoldenResourcePid The PID of the golden resource to MDM-Expand.
* @return A set of strings representing the FHIR ids of the expanded resources.
*/
@Override
public Set<String> expandMdmByGoldenResourceId(IResourcePersistentId theGoldenResourcePid) {
public Set<String> expandMdmByGoldenResourceId(
RequestPartitionId theRequestPartitionId, IResourcePersistentId<?> theGoldenResourcePid) {
ourLog.debug("About to expand golden resource with PID {}", theGoldenResourcePid);
List<MdmPidTuple> goldenPidSourcePidTuples = myMdmLinkDao.expandPidsByGoldenResourcePidAndMatchResult(
final List<MdmPidTuple<?>> goldenPidSourcePidTuples = myMdmLinkDao.expandPidsByGoldenResourcePidAndMatchResult(
theGoldenResourcePid, MdmMatchResultEnum.MATCH);
return flattenPidTuplesToSet(theGoldenResourcePid, goldenPidSourcePidTuples);
return flattenPidTuplesToSet(theRequestPartitionId, theGoldenResourcePid, goldenPidSourcePidTuples);
}

/**
* Given a resource ID of a golden resource, perform MDM expansion and return all the resource IDs of all resources that are
* MDM-Matched to this golden resource.
*
* @param theRequestPartitionId Partition information from the request
* @param theGoldenResourcePid The resource ID of the golden resource to MDM-Expand.
* @return A set of strings representing the FHIR ids of the expanded resources.
*/
@Override
public Set<String> expandMdmByGoldenResourcePid(IResourcePersistentId theGoldenResourcePid) {
public Set<String> expandMdmByGoldenResourcePid(
RequestPartitionId theRequestPartitionId, IResourcePersistentId<?> theGoldenResourcePid) {
ourLog.debug("About to expand golden resource with PID {}", theGoldenResourcePid);
List<MdmPidTuple> goldenPidSourcePidTuples = myMdmLinkDao.expandPidsByGoldenResourcePidAndMatchResult(
final List<MdmPidTuple<?>> goldenPidSourcePidTuples = myMdmLinkDao.expandPidsByGoldenResourcePidAndMatchResult(
theGoldenResourcePid, MdmMatchResultEnum.MATCH);
return flattenPidTuplesToSet(theGoldenResourcePid, goldenPidSourcePidTuples);
return flattenPidTuplesToSet(theRequestPartitionId, theGoldenResourcePid, goldenPidSourcePidTuples);
}

@Override
public Set<String> expandMdmByGoldenResourceId(IdDt theId) {
public Set<String> expandMdmByGoldenResourceId(RequestPartitionId theRequestPartitionId, IdDt theId) {
ourLog.debug("About to expand golden resource with golden resource id {}", theId);
IResourcePersistentId pidOrThrowException =
IResourcePersistentId<?> pidOrThrowException =
myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), theId);
return expandMdmByGoldenResourcePid(pidOrThrowException);
return expandMdmByGoldenResourcePid(theRequestPartitionId, pidOrThrowException);
}

@Nonnull
public Set<String> flattenPidTuplesToSet(
IResourcePersistentId initialPid, List<MdmPidTuple> goldenPidSourcePidTuples) {
Set<IResourcePersistentId> flattenedPids = new HashSet<>();
goldenPidSourcePidTuples.forEach(tuple -> {
flattenedPids.add(tuple.getSourcePid());
flattenedPids.add(tuple.getGoldenPid());
});
Set<String> resourceIds = myIdHelperService.translatePidsToFhirResourceIds(flattenedPids);
ourLog.debug("Pid {} has been expanded to [{}]", initialPid, String.join(",", resourceIds));
RequestPartitionId theRequestPartitionId,
IResourcePersistentId<?> theInitialPid,
List<MdmPidTuple<?>> theGoldenPidSourcePidTuples) {
final Set<IResourcePersistentId> flattenedPids = theGoldenPidSourcePidTuples.stream()
.map(tuple -> flattenTuple(theRequestPartitionId, tuple))
.flatMap(Collection::stream)
.collect(Collectors.toUnmodifiableSet());
final Set<String> resourceIds = myIdHelperService.translatePidsToFhirResourceIds(flattenedPids);
ourLog.debug("Pid {} has been expanded to [{}]", theInitialPid, String.join(",", resourceIds));
return resourceIds;
}

@Nonnull
static Set<IResourcePersistentId> flattenTuple(RequestPartitionId theRequestPartitionId, MdmPidTuple<?> theTuple) {
if (theRequestPartitionId.isPartitionCovered(theTuple.getGoldenPartitionId())) {
if (theRequestPartitionId.isPartitionCovered(theTuple.getSourcePartitionId())) {
return Set.of(theTuple.getSourcePid(), theTuple.getGoldenPid());
}
return Set.of(theTuple.getGoldenPid());
}

if (theRequestPartitionId.isPartitionCovered(theTuple.getSourcePartitionId())) {
return Set.of(theTuple.getSourcePid());
}

return Collections.emptySet();
}
}
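
Editor's note, a worked example of flattenTuple (values borrowed from the test below): for a tuple whose golden PID 999 lives on partition 3 and whose source PID 123 lives on partition 1, a request scoped to partition 1 yields {123}, partition 3 yields {999}, partitions 1 and 3 together yield {123, 999}, and partition 2 yields the empty set.
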
@@ -0,0 +1,178 @@
package ca.uhn.fhir.mdm.svc;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.mdm.dao.IMdmLinkDao;
import ca.uhn.fhir.mdm.model.MdmPidTuple;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import jakarta.annotation.Nonnull;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
class MdmLinkExpandSvcTest {
private static final Logger ourLog = LoggerFactory.getLogger(MdmLinkExpandSvcTest.class);

private static final int PARTITION_A = 1;
private static final int PARTITION_B = 2;
private static final int PARTITION_GOLDEN = 3;
private static final JpaPid JPA_PID_PARTITION_A_1 = JpaPid.fromId(123L);
private static final JpaPid JPA_PID_PARTITION_B = JpaPid.fromId(456L);
private static final JpaPid JPA_PID_PARTITION_A_2 = JpaPid.fromId(789L);
private static final JpaPid JPA_PID_PARTITION_DEFAULT = JpaPid.fromId(111L);
private static final JpaPid JPA_PID_PARTITION_GOLDEN = JpaPid.fromId(999L);
private static final Set<JpaPid> ALL_PIDS = Set.of(JPA_PID_PARTITION_A_1, JPA_PID_PARTITION_B, JPA_PID_PARTITION_A_2, JPA_PID_PARTITION_GOLDEN, JPA_PID_PARTITION_DEFAULT);
private static final MdmPidTuple<JpaPid> JPA_PID_MDM_PID_TUPLE_1 = MdmPidTuple.fromGoldenAndSourceAndPartitionIds(JPA_PID_PARTITION_GOLDEN, PARTITION_GOLDEN, JPA_PID_PARTITION_A_1, PARTITION_A);
private static final MdmPidTuple<JpaPid> JPA_PID_MDM_PID_TUPLE_2 = MdmPidTuple.fromGoldenAndSourceAndPartitionIds(JPA_PID_PARTITION_GOLDEN, PARTITION_GOLDEN, JPA_PID_PARTITION_B, PARTITION_B);
private static final MdmPidTuple<JpaPid> JPA_PID_MDM_PID_TUPLE_3 = MdmPidTuple.fromGoldenAndSourceAndPartitionIds(JPA_PID_PARTITION_GOLDEN, PARTITION_GOLDEN, JPA_PID_PARTITION_A_2, PARTITION_A);
private static final MdmPidTuple<JpaPid> JPA_PID_MDM_PID_TUPLE_4 = MdmPidTuple.fromGoldenAndSourceAndPartitionIds(JPA_PID_PARTITION_GOLDEN, PARTITION_GOLDEN, JPA_PID_PARTITION_DEFAULT, null);

@Mock
private IMdmLinkDao<JpaPid,?> myIMdmLinkDao;

@Mock
private IIdHelperService<?> myIdHelperService;

@InjectMocks
private MdmLinkExpandSvc mySubject;

void beforeEachExpand() {
final Answer<Set<String>> answer = invocation -> {
final Set<IResourcePersistentId<?>> param = invocation.getArgument(0);
return param.stream()
.filter(JpaPid.class::isInstance)
.map(JpaPid.class::cast)
.map(pid -> Long.toString(pid.getId()))
.collect(Collectors.toUnmodifiableSet());
};

when(myIdHelperService.translatePidsToFhirResourceIds(any()))
.thenAnswer(answer);
}

private static Stream<Arguments> partitionsAndExpectedPids() {
return Stream.of(
Arguments.of(RequestPartitionId.allPartitions(), ALL_PIDS),
Arguments.of(RequestPartitionId.defaultPartition(), Collections.singleton(JPA_PID_PARTITION_DEFAULT)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_A)), Set.of(JPA_PID_PARTITION_A_1, JPA_PID_PARTITION_A_2)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_B)), Collections.singleton(JPA_PID_PARTITION_B)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_GOLDEN)), Collections.singleton(JPA_PID_PARTITION_GOLDEN)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_A, PARTITION_B)), Set.of(JPA_PID_PARTITION_A_1, JPA_PID_PARTITION_A_2, JPA_PID_PARTITION_B)),
Arguments.of(RequestPartitionId.fromPartitionIds(Arrays.asList(PARTITION_A, PARTITION_B, null)), Set.of(JPA_PID_PARTITION_A_1, JPA_PID_PARTITION_A_2, JPA_PID_PARTITION_B, JPA_PID_PARTITION_DEFAULT)),
Arguments.of(RequestPartitionId.fromPartitionIds(Arrays.asList(PARTITION_A, PARTITION_B, PARTITION_GOLDEN, null)), ALL_PIDS)
);
}

@ParameterizedTest
@MethodSource("partitionsAndExpectedPids")
void expandMdmBySourceResourcePid(RequestPartitionId theRequestPartitionId, Set<JpaPid> theExpectedJpaPids) {
beforeEachExpand();

final JpaPid jpaPid = JpaPid.fromId(123L);
when(myIMdmLinkDao.expandPidsBySourcePidAndMatchResult(jpaPid, MdmMatchResultEnum.MATCH)).thenReturn(List.of(
JPA_PID_MDM_PID_TUPLE_1,
JPA_PID_MDM_PID_TUPLE_1,
JPA_PID_MDM_PID_TUPLE_1,
JPA_PID_MDM_PID_TUPLE_1,
JPA_PID_MDM_PID_TUPLE_2,
JPA_PID_MDM_PID_TUPLE_2,
JPA_PID_MDM_PID_TUPLE_2,
JPA_PID_MDM_PID_TUPLE_2,
JPA_PID_MDM_PID_TUPLE_3,
JPA_PID_MDM_PID_TUPLE_3,
JPA_PID_MDM_PID_TUPLE_3,
JPA_PID_MDM_PID_TUPLE_3,
JPA_PID_MDM_PID_TUPLE_3,
JPA_PID_MDM_PID_TUPLE_4,
JPA_PID_MDM_PID_TUPLE_4,
JPA_PID_MDM_PID_TUPLE_4,
JPA_PID_MDM_PID_TUPLE_4)
);

final Set<String> resolvedPids = mySubject.expandMdmBySourceResourcePid(theRequestPartitionId, jpaPid);

assertEquals(toPidStrings(theExpectedJpaPids), resolvedPids, String.format("expected: %s, actual: %s", theExpectedJpaPids, resolvedPids));
}

@ParameterizedTest
@MethodSource("partitionsAndExpectedPids")
void expandMdmByGoldenResourcePid(RequestPartitionId theRequestPartitionId, Set<JpaPid> theExpectedJpaPids) {
beforeEachExpand();

when(myIMdmLinkDao.expandPidsByGoldenResourcePidAndMatchResult(any(), any()))
.thenReturn(List.of(JPA_PID_MDM_PID_TUPLE_1, JPA_PID_MDM_PID_TUPLE_2, JPA_PID_MDM_PID_TUPLE_3, JPA_PID_MDM_PID_TUPLE_4));
final JpaPid jpaPid = JpaPid.fromId(123L);
final Set<String> resolvedPids = mySubject.expandMdmByGoldenResourcePid(theRequestPartitionId, jpaPid);

assertEquals(toPidStrings(theExpectedJpaPids), resolvedPids, String.format("expected: %s, actual: %s", theExpectedJpaPids, resolvedPids));
}

private static Stream<Arguments> partitionsAndTuples() {
return Stream.of(
Arguments.of(RequestPartitionId.allPartitions(), JPA_PID_MDM_PID_TUPLE_1, Set.of(JPA_PID_PARTITION_GOLDEN, JPA_PID_PARTITION_A_1)),
Arguments.of(RequestPartitionId.defaultPartition(), JPA_PID_MDM_PID_TUPLE_1, Collections.emptySet()),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_A)), JPA_PID_MDM_PID_TUPLE_1, Collections.singleton(JPA_PID_PARTITION_A_1)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_B)), JPA_PID_MDM_PID_TUPLE_1, Collections.emptySet()),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_GOLDEN)), JPA_PID_MDM_PID_TUPLE_1, Collections.singleton(JPA_PID_PARTITION_GOLDEN)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_A, PARTITION_B)), JPA_PID_MDM_PID_TUPLE_1, Collections.singleton(JPA_PID_PARTITION_A_1)),
Arguments.of(RequestPartitionId.fromPartitionIds(Arrays.asList(PARTITION_A, PARTITION_B, null)), JPA_PID_MDM_PID_TUPLE_1, Collections.singleton(JPA_PID_PARTITION_A_1)),
Arguments.of(RequestPartitionId.fromPartitionIds(Arrays.asList(PARTITION_A, PARTITION_B, PARTITION_GOLDEN, null)), JPA_PID_MDM_PID_TUPLE_1, Set.of(JPA_PID_PARTITION_GOLDEN, JPA_PID_PARTITION_A_1)),
Arguments.of(RequestPartitionId.allPartitions(), JPA_PID_MDM_PID_TUPLE_2, Set.of(JPA_PID_PARTITION_GOLDEN, JPA_PID_PARTITION_B)),
Arguments.of(RequestPartitionId.defaultPartition(), JPA_PID_MDM_PID_TUPLE_2, Collections.emptySet()),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_A)), JPA_PID_MDM_PID_TUPLE_2, Collections.emptySet()),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_B)), JPA_PID_MDM_PID_TUPLE_2, Collections.singleton(JPA_PID_PARTITION_B)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_GOLDEN)), JPA_PID_MDM_PID_TUPLE_2, Collections.singleton(JPA_PID_PARTITION_GOLDEN)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_A, PARTITION_B)), JPA_PID_MDM_PID_TUPLE_2, Collections.singleton(JPA_PID_PARTITION_B)),
Arguments.of(RequestPartitionId.fromPartitionIds(Arrays.asList(PARTITION_A, PARTITION_B, null)), JPA_PID_MDM_PID_TUPLE_2, Collections.singleton(JPA_PID_PARTITION_B)),
Arguments.of(RequestPartitionId.fromPartitionIds(Arrays.asList(PARTITION_A, PARTITION_B, PARTITION_GOLDEN, null)), JPA_PID_MDM_PID_TUPLE_2, Set.of(JPA_PID_PARTITION_GOLDEN, JPA_PID_PARTITION_B)),
Arguments.of(RequestPartitionId.allPartitions(), JPA_PID_MDM_PID_TUPLE_3, Set.of(JPA_PID_PARTITION_GOLDEN, JPA_PID_PARTITION_A_2)),
Arguments.of(RequestPartitionId.defaultPartition(), JPA_PID_MDM_PID_TUPLE_3, Collections.emptySet()),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_A)), JPA_PID_MDM_PID_TUPLE_3, Collections.singleton(JPA_PID_PARTITION_A_2)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_B)), JPA_PID_MDM_PID_TUPLE_3, Collections.emptySet()),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_GOLDEN)), JPA_PID_MDM_PID_TUPLE_3, Collections.singleton(JPA_PID_PARTITION_GOLDEN)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_A, PARTITION_B)), JPA_PID_MDM_PID_TUPLE_3, Collections.singleton(JPA_PID_PARTITION_A_2)),
Arguments.of(RequestPartitionId.fromPartitionIds(Arrays.asList(PARTITION_A, PARTITION_B, null)), JPA_PID_MDM_PID_TUPLE_3, Collections.singleton(JPA_PID_PARTITION_A_2)),
Arguments.of(RequestPartitionId.fromPartitionIds(Arrays.asList(PARTITION_A, PARTITION_B, PARTITION_GOLDEN, null)), JPA_PID_MDM_PID_TUPLE_3, Set.of(JPA_PID_PARTITION_GOLDEN, JPA_PID_PARTITION_A_2)),
Arguments.of(RequestPartitionId.allPartitions(), JPA_PID_MDM_PID_TUPLE_4, Set.of(JPA_PID_PARTITION_GOLDEN, JPA_PID_PARTITION_DEFAULT)),
Arguments.of(RequestPartitionId.defaultPartition(), JPA_PID_MDM_PID_TUPLE_4, Collections.singleton(JPA_PID_PARTITION_DEFAULT)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_A)), JPA_PID_MDM_PID_TUPLE_4, Collections.emptySet()),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_B)), JPA_PID_MDM_PID_TUPLE_4, Collections.emptySet()),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_GOLDEN)), JPA_PID_MDM_PID_TUPLE_4, Collections.singleton(JPA_PID_PARTITION_GOLDEN)),
Arguments.of(RequestPartitionId.fromPartitionIds(List.of(PARTITION_A, PARTITION_B)), JPA_PID_MDM_PID_TUPLE_4, Collections.emptySet()),
Arguments.of(RequestPartitionId.fromPartitionIds(Arrays.asList(PARTITION_A, PARTITION_B, null)), JPA_PID_MDM_PID_TUPLE_4, Collections.singleton(JPA_PID_PARTITION_DEFAULT)),
Arguments.of(RequestPartitionId.fromPartitionIds(Arrays.asList(PARTITION_A, PARTITION_B, PARTITION_GOLDEN, null)), JPA_PID_MDM_PID_TUPLE_4, Set.of(JPA_PID_PARTITION_DEFAULT, JPA_PID_PARTITION_GOLDEN))
);
}

@ParameterizedTest
@MethodSource("partitionsAndTuples")
void flattenTuple(RequestPartitionId theRequestPartitionId, MdmPidTuple<JpaPid> theTuple, Set<JpaPid> theExpectedResourceIds) {
assertEquals(theExpectedResourceIds, MdmLinkExpandSvc.flattenTuple(theRequestPartitionId, theTuple));
}

@Nonnull
private Set<String> toPidStrings(Set<JpaPid> theExpectedJpaPids) {
return theExpectedJpaPids.stream().map(JpaPid::getId).map(id -> Long.toString(id)).collect(Collectors.toUnmodifiableSet());
}
}
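
Editor's note on the null entries in the partition ID lists above: a null partition ID denotes the default partition, which is why the tuple whose source partition is null (JPA_PID_MDM_PID_TUPLE_4) resolves its source PID only for requests that cover the default partition.
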
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -20,7 +20,7 @@
package ca.uhn.fhir.rest.api.server.bulk;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.model.api.BaseBatchJobParameters;
import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
import com.fasterxml.jackson.annotation.JsonProperty;

@@ -32,7 +32,7 @@ import java.util.Collection;
import java.util.Date;
import java.util.List;

public class BulkExportJobParameters implements IModelJson {
public class BulkExportJobParameters extends BaseBatchJobParameters {

/**
* List of resource types to export.
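
Editor's note: because BulkExportJobParameters now extends BaseBatchJobParameters, callers can attach ad-hoc serializable key-value pairs to an export job through the inherited userData map. A hedged sketch, assuming the inherited setUserData(String, Object) mutator introduced elsewhere in this changeset; the key and value below are invented.

import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;

class ExportParamsSketch {
    static BulkExportJobParameters tagged() {
        BulkExportJobParameters params = new BulkExportJobParameters();
        // Attach an arbitrary, JSON-serializable value to the job (illustrative only)
        params.setUserData("requestingSystem", "tenant-42");
        return params;
    }
}
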
@@ -31,6 +31,8 @@ import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.ResponseDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

@@ -497,6 +499,37 @@ public class ConsentInterceptor {
}
}

protected RequestDetails getRequestDetailsForCurrentExportOperation(
BulkExportJobParameters theParameters, IBaseResource theBaseResource) {
// bulk exports are system operations
SystemRequestDetails details = new SystemRequestDetails();
return details;
}

@Hook(value = Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)
public boolean shouldBulkExportIncludeResource(BulkExportJobParameters theParameters, IBaseResource theResource) {
RequestDetails requestDetails = getRequestDetailsForCurrentExportOperation(theParameters, theResource);

for (IConsentService next : myConsentService) {
ConsentOutcome nextOutcome = next.willSeeResource(requestDetails, theResource, myContextConsentServices);

ConsentOperationStatusEnum status = nextOutcome.getStatus();
switch (status) {
case AUTHORIZED:
case PROCEED:
// go to the next
break;
case REJECT:
// if any consent service rejects,
// reject the resource
return false;
}
}

// default is to include the resource
return true;
}

private boolean isRequestAuthorized(RequestDetails theRequestDetails) {
boolean retVal = false;
if (theRequestDetails != null) {

@@ -515,11 +548,11 @@ public class ConsentInterceptor {
}

private boolean isMetaOperation(RequestDetails theRequestDetails) {
return OPERATION_META.equals(theRequestDetails.getOperation());
return theRequestDetails != null && OPERATION_META.equals(theRequestDetails.getOperation());
}

private boolean isMetadataPath(RequestDetails theRequestDetails) {
return URL_TOKEN_METADATA.equals(theRequestDetails.getRequestPath());
return theRequestDetails != null && URL_TOKEN_METADATA.equals(theRequestDetails.getRequestPath());
}

private void validateParameter(Map<String, String[]> theParameterMap) {
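
Editor's note: the new STORAGE_BULK_EXPORT_RESOURCE_INCLUSION hook means any registered IConsentService can veto a resource from a bulk export. A hedged sketch of such a service; the class name and the security-label heuristic are invented, and the interface's default methods are assumed to cover the remaining callbacks.

import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.consent.ConsentOutcome;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentService;
import org.hl7.fhir.instance.model.api.IBaseResource;

class RedactLabelledResourcesConsentService implements IConsentService {
    @Override
    public ConsentOutcome willSeeResource(
            RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) {
        // Any security label on the resource is treated as a restriction marker (illustrative rule)
        boolean restricted = !theResource.getMeta().getSecurity().isEmpty();
        // A single REJECT excludes the resource from the bulk export output
        return restricted ? ConsentOutcome.REJECT : ConsentOutcome.PROCEED;
    }
}
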
@@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -21,7 +21,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-caching-api</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
</dependency>
<dependency>

@@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<artifactId>hapi-deployable-pom</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
</parent>

<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;

@@ -56,6 +57,10 @@ public class DropTableTask extends BaseTableTask {
logInfo(ourLog, "Table {} has the following indexes: {}", getTableName(), indexNames);

for (String next : foreignKeys) {
if (getDriverType() == DriverTypeEnum.DERBY_EMBEDDED && next.contains("-")) {
// Derby creates special internal indexes with GUID names that can't be deleted
continue;
}
List<String> sql = DropForeignKeyTask.generateSql(getTableName(), next, getDriverType());
for (@Language("SQL") String nextSql : sql) {
executeSql(getTableName(), nextSql);

@@ -69,6 +74,10 @@ public class DropTableTask extends BaseTableTask {
.setDriverType(getDriverType())
.setDryRun(isDryRun());
for (String nextIndex : indexNames) {
if (getDriverType() == DriverTypeEnum.DERBY_EMBEDDED && nextIndex.contains("-")) {
// Derby creates special internal indexes with GUID names that can't be deleted
continue;
}
theIndexTask.setIndexName(nextIndex).execute();
}
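
Editor's note on the dash check above: user-declared index names are unquoted SQL identifiers and cannot contain a dash, so a dash is taken as the tell for a Derby-generated (undroppable) GUID-named index. A hedged restatement of the same rule as a predicate; the helper class is invented.

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;

class DerbyIndexSketch {
    // Mirrors the guard in DropTableTask: skip Derby's internally generated,
    // GUID-named indexes, which cannot be dropped explicitly.
    static boolean isDerbyGeneratedIndex(DriverTypeEnum theDriver, String theIndexName) {
        return theDriver == DriverTypeEnum.DERBY_EMBEDDED && theIndexName.contains("-");
    }
}
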
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -58,6 +58,7 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;
import ca.uhn.fhir.util.BinaryUtil;
import ca.uhn.fhir.util.FhirTerser;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import jakarta.annotation.Nonnull;

@@ -456,4 +457,9 @@ public class ExpandResourceAndWriteBinaryStep
protected OutputStreamWriter getStreamWriter(ByteArrayOutputStream theOutputStream) {
return new OutputStreamWriter(theOutputStream, Constants.CHARSET_UTF8);
}

@VisibleForTesting
public void setIdHelperServiceForUnitTest(IIdHelperService theIdHelperService) {
myIdHelperService = theIdHelperService;
}
}
@@ -48,6 +48,7 @@ import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import jakarta.annotation.Nonnull;

@@ -287,4 +288,9 @@ public class ExpandResourcesStep
// see WriteBinaryStep as well
return myFhirContext.newJsonParser().setPrettyPrint(false);
}

@VisibleForTesting
public void setIdHelperServiceForUnitTest(IIdHelperService theIdHelperService) {
myIdHelperService = theIdHelperService;
}
}
@@ -33,6 +33,7 @@ import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor;
import ca.uhn.fhir.jpa.bulk.export.model.ExportPIDIteratorParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import com.google.common.annotations.VisibleForTesting;
import jakarta.annotation.Nonnull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -159,4 +160,9 @@ public class FetchResourceIdsStep implements IFirstJobStepWorker<BulkExportJobPa

theDataSink.accept(idList);
}

@VisibleForTesting
public void setBulkExportProcessorForUnitTest(IBulkExportProcessor theBulkExportProcessor) {
myBulkExportProcessor = theBulkExportProcessor;
}
}
@@ -123,6 +123,7 @@ public class ExpandResourceAndWriteBinaryStepTest {
@BeforeEach
public void init() {
ourLog.addAppender(myAppender);
myFinalStep.setIdHelperServiceForUnitTest(myIdHelperService);
}

@AfterEach

@@ -27,6 +27,7 @@ import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

@@ -82,6 +83,11 @@ public class ExpandResourcesStepTest {
@InjectMocks
private ExpandResourcesStep mySecondStep;

@BeforeEach
public void init() {
mySecondStep.setIdHelperServiceForUnitTest(myIdHelperService);
}

private BulkExportJobParameters createParameters(boolean thePartitioned) {
BulkExportJobParameters parameters = new BulkExportJobParameters();
parameters.setResourceTypes(Arrays.asList("Patient", "Observation"));

@@ -65,6 +65,7 @@ public class FetchResourceIdsStepTest {
@BeforeEach
public void init() {
ourLog.addAppender(myAppender);
myFirstStep.setBulkExportProcessorForUnitTest(myBulkExportProcessor);
}

@AfterEach
@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.11.8-SNAPSHOT</version>
<version>6.11.10-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>