Improve loading performance (#3197)
* Synthea perf work
* Cleanup
* Work on performance
* Ongoing perf work
* More perf work
* Perf work
* Test fixes
* Updates
* Test fix
* Test fixes
* Clean up tests
* Test cleanup
* Cleanup
* Add test
* One more perf tweak
* Fixes
* Add changelog
* Add changelog
* License header update
* License header updates
parent 7ef1d159ee
commit aae53d6ec7
@@ -22,7 +22,12 @@ package ca.uhn.fhir.context;
import ca.uhn.fhir.parser.IParser;

import java.util.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
* This object supplies default configuration to all {@link IParser parser} instances
@@ -37,31 +42,40 @@ public class ParserOptions {
private boolean myStripVersionsFromReferences = true;
private Set<String> myDontStripVersionsFromReferencesAtPaths = Collections.emptySet();
private boolean myOverrideResourceIdWithBundleEntryFullUrl = true;
private boolean myAutoContainReferenceTargetsWithNoId = true;

/**
* If supplied value(s), any resource references at the specified paths will have their
* resource versions encoded instead of being automatically stripped during the encoding
* process. This setting has no effect on the parsing process.
* If set to {@literal true} (which is the default), contained resources may be specified by
* populating the target (contained) resource directly in {@link org.hl7.fhir.instance.model.api.IBaseReference#setReference(String)}
* and the parser will automatically locate it and insert it into <code>Resource.contained</code> when
* serializing. This is convenient, but also imposes a performance cost when serializing large numbers
* of resources, so this can be disabled if it is not needed.
* <p>
* This method provides a finer-grained level of control than {@link #setStripVersionsFromReferences(boolean)}
* and any paths specified by this method will be encoded even if {@link #setStripVersionsFromReferences(boolean)}
* has been set to <code>true</code> (which is the default)
* If disabled, only resources that are directly placed in <code>Resource.contained</code> will be
* serialized.
* </p>
*
* @param thePaths A collection of paths for which the resource versions will not be removed automatically
* when serializing, e.g. "Patient.managingOrganization" or "AuditEvent.object.reference". Note that
* only resource name and field names with dots separating is allowed here (no repetition
* indicators, FluentPath expressions, etc.)
* @return Returns a reference to <code>this</code> parser so that method calls can be chained together
* @see #setStripVersionsFromReferences(boolean)
* @since 6.0.0
*/
public ParserOptions setDontStripVersionsFromReferencesAtPaths(String... thePaths) {
if (thePaths == null) {
setDontStripVersionsFromReferencesAtPaths((List<String>) null);
} else {
setDontStripVersionsFromReferencesAtPaths(Arrays.asList(thePaths));
public boolean isAutoContainReferenceTargetsWithNoId() {
return myAutoContainReferenceTargetsWithNoId;
}
return this;

/**
* If set to {@literal true} (which is the default), contained resources may be specified by
* populating the target (contained) resource directly in {@link org.hl7.fhir.instance.model.api.IBaseReference#setReference(String)}
* and the parser will automatically locate it and insert it into <code>Resource.contained</code> when
* serializing. This is convenient, but also imposes a performance cost when serializing large numbers
* of resources, so this can be disabled if it is not needed.
* <p>
* If disabled, only resources that are directly placed in <code>Resource.contained</code> will be
* serialized.
* </p>
*
* @since 6.0.0
*/
public void setAutoContainReferenceTargetsWithNoId(boolean theAllowAutoContainedReferences) {
myAutoContainReferenceTargetsWithNoId = theAllowAutoContainedReferences;
}

/**

@@ -109,6 +123,32 @@ public class ParserOptions {
return myDontStripVersionsFromReferencesAtPaths;
}

/**
* If supplied value(s), any resource references at the specified paths will have their
* resource versions encoded instead of being automatically stripped during the encoding
* process. This setting has no effect on the parsing process.
* <p>
* This method provides a finer-grained level of control than {@link #setStripVersionsFromReferences(boolean)}
* and any paths specified by this method will be encoded even if {@link #setStripVersionsFromReferences(boolean)}
* has been set to <code>true</code> (which is the default)
* </p>
*
* @param thePaths A collection of paths for which the resource versions will not be removed automatically
* when serializing, e.g. "Patient.managingOrganization" or "AuditEvent.object.reference". Note that
* only resource name and field names with dots separating is allowed here (no repetition
* indicators, FluentPath expressions, etc.)
* @return Returns a reference to <code>this</code> parser so that method calls can be chained together
* @see #setStripVersionsFromReferences(boolean)
*/
public ParserOptions setDontStripVersionsFromReferencesAtPaths(String... thePaths) {
if (thePaths == null) {
setDontStripVersionsFromReferencesAtPaths((List<String>) null);
} else {
setDontStripVersionsFromReferencesAtPaths(Arrays.asList(thePaths));
}
return this;
}

/**
* If supplied value(s), any resource references at the specified paths will have their
* resource versions encoded instead of being automatically stripped during the encoding
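For illustration only (not part of the commit's diff): the path-based setting documented in the hunks above is typically applied once on the shared FhirContext, using the same example paths the Javadoc mentions.

FhirContext ctx = FhirContext.forR4();
// Keep reference versions at these two paths; all other references are still stripped as usual.
ctx.getParserOptions().setDontStripVersionsFromReferencesAtPaths(
		"Patient.managingOrganization",
		"AuditEvent.object.reference");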
@@ -45,6 +45,7 @@ import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.base.Charsets;
import org.apache.commons.io.output.StringBuilderWriter;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;

@@ -268,7 +269,7 @@ public abstract class BaseParser implements IParser {

@Override
public String encodeResourceToString(IBaseResource theResource) throws DataFormatException {
Writer stringWriter = new StringWriter();
Writer stringWriter = new StringBuilderWriter();
try {
encodeResourceToWriter(theResource, stringWriter);
} catch (IOException e) {

@@ -615,7 +616,7 @@ public abstract class BaseParser implements IParser {
if (isBlank(resourceId.getValue())) {
resourceId.setValue(fullUrl);
} else {
if (fullUrl.startsWith("urn:") && fullUrl.endsWith(":" + resourceId.getIdPart())) {
if (fullUrl.startsWith("urn:") && fullUrl.length() > resourceId.getIdPart().length() && fullUrl.charAt(fullUrl.length() - resourceId.getIdPart().length() - 1) == ':' && fullUrl.endsWith(resourceId.getIdPart())) {
resourceId.setValue(fullUrl);
} else {
IIdType fullUrlId = myContext.getVersion().newIdType();
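The last hunk above replaces fullUrl.endsWith(":" + resourceId.getIdPart()) with an equivalent test that avoids building a temporary String for every Bundle entry. A standalone sketch of the same idea (the helper name is illustrative, not part of the commit):

// Equivalent to fullUrl.endsWith(":" + idPart), but with no intermediate String allocation.
static boolean endsWithColonThenId(String fullUrl, String idPart) {
	return fullUrl.length() > idPart.length()
		&& fullUrl.charAt(fullUrl.length() - idPart.length() - 1) == ':'
		&& fullUrl.endsWith(idPart);
}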
@@ -57,6 +57,7 @@ import java.io.Writer;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

@@ -939,7 +940,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser {
}

JsonLikeObject alternate = alternateVal.getAsObject();
for (String nextKey : alternate.keySet()) {
for (Iterator<String> keyIter = alternate.keyIterator(); keyIter.hasNext(); ) {
String nextKey = keyIter.next();
JsonLikeValue nextVal = alternate.get(nextKey);
if ("extension".equals(nextKey)) {
boolean isModifier = false;

@@ -962,12 +965,11 @@ public class JsonParser extends BaseParser implements IJsonLikeParser {
}

private void parseChildren(JsonLikeObject theObject, ParserState<?> theState) {
Set<String> keySet = theObject.keySet();

int allUnderscoreNames = 0;
int handledUnderscoreNames = 0;

for (String nextName : keySet) {
for (Iterator<String> keyIter = theObject.keyIterator(); keyIter.hasNext(); ) {
String nextName = keyIter.next();
if ("resourceType".equals(nextName)) {
continue;
} else if ("extension".equals(nextName)) {

@@ -1013,7 +1015,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser {
* for example.
*/
if (allUnderscoreNames > handledUnderscoreNames) {
for (String alternateName : keySet) {
for (Iterator<String> keyIter = theObject.keyIterator(); keyIter.hasNext(); ) {
String alternateName = keyIter.next();
if (alternateName.startsWith("_") && alternateName.length() > 1) {
JsonLikeValue nextValue = theObject.get(alternateName);
if (nextValue != null) {

@@ -1116,7 +1119,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser {
url = getExtensionUrl(jsonElement.getAsString());
}
theState.enteringNewElementExtension(null, url, theIsModifier, getServerBaseUrl());
for (String next : nextExtObj.keySet()) {
for (Iterator<String> keyIter = nextExtObj.keyIterator(); keyIter.hasNext(); ) {
String next = keyIter.next();
if ("url".equals(next)) {
continue;
} else if ("extension".equals(next)) {

@@ -1146,7 +1150,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser {
* for example.
*/
if (allUnderscoreNames > handledUnderscoreNames) {
for (String alternateName : nextExtObj.keySet()) {
for (Iterator<String> keyIter = nextExtObj.keyIterator(); keyIter.hasNext(); ) {
String alternateName = keyIter.next();
if (alternateName.startsWith("_") && alternateName.length() > 1) {
JsonLikeValue nextValue = nextExtObj.get(alternateName);
if (nextValue != null) {
@@ -1,6 +1,6 @@
package ca.uhn.fhir.parser.json;

import java.util.Set;
import java.util.Iterator;

/*
* #%L

@@ -49,7 +49,7 @@ public abstract class JsonLikeObject extends JsonLikeValue {
return null;
}

public abstract Set<String> keySet ();
public abstract Iterator<String> keyIterator();

public abstract JsonLikeValue get(String key);
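The eager keySet() accessor is replaced by a lazier keyIterator(), and the JsonParser hunks above rewrite every key loop to match. A sketch of the consuming pattern, where jsonObject stands in for any JsonLikeObject instance:

for (Iterator<String> keyIter = jsonObject.keyIterator(); keyIter.hasNext(); ) {
	String key = keyIter.next();
	JsonLikeValue value = jsonObject.get(key);
	// handle the key/value pair exactly as the old for-each over keySet() did
}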
@@ -31,6 +31,7 @@ import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.DecimalNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;

@@ -46,9 +47,6 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

public class JacksonStructure implements JsonLikeStructure {

@@ -153,8 +151,6 @@ public class JacksonStructure implements JsonLikeStructure {
private static class JacksonJsonObject extends JsonLikeObject {
private final ObjectNode nativeObject;
private final Map<String, JsonLikeValue> jsonLikeMap = new LinkedHashMap<>();
private Set<String> keySet = null;

public JacksonJsonObject(ObjectNode json) {
this.nativeObject = json;

@@ -166,30 +162,17 @@ public class JacksonStructure implements JsonLikeStructure {
}

@Override
public Set<String> keySet() {
if (null == keySet) {
final Iterable<Map.Entry<String, JsonNode>> iterable = nativeObject::fields;
keySet = StreamSupport.stream(iterable.spliterator(), false)
.map(Map.Entry::getKey)
.collect(Collectors.toCollection(EntryOrderedSet::new));
}
return keySet;
public Iterator<String> keyIterator() {
return nativeObject.fieldNames();
}

@Override
public JsonLikeValue get(String key) {
JsonLikeValue result = null;
if (jsonLikeMap.containsKey(key)) {
result = jsonLikeMap.get(key);
} else {
JsonNode child = nativeObject.get(key);
if (child != null) {
result = new JacksonJsonValue(child);
return new JacksonJsonValue(child);
}
jsonLikeMap.put(key, result);
}
return result;
return null;
}
}

@@ -300,20 +283,29 @@ public class JacksonStructure implements JsonLikeStructure {
@Override
public ValueType getJsonType() {
if (null == nativeValue || nativeValue.isNull()) {
if (null == nativeValue) {
return ValueType.NULL;
}
if (nativeValue.isObject()) {
switch (nativeValue.getNodeType()) {
case NULL:
case MISSING:
return ValueType.NULL;
case OBJECT:
return ValueType.OBJECT;
}
if (nativeValue.isArray()) {
case ARRAY:
return ValueType.ARRAY;
case POJO:
case BINARY:
case STRING:
case NUMBER:
case BOOLEAN:
default:
break;
}
if (nativeValue.isValueNode()) {
return ValueType.SCALAR;
}
return null;
}

@Override
public ScalarType getDataType() {

@@ -378,7 +370,10 @@ public class JacksonStructure implements JsonLikeStructure {
}

private static ObjectMapper createObjectMapper() {
ObjectMapper retVal = new ObjectMapper();
ObjectMapper retVal =
JsonMapper
.builder()
.build();
retVal = retVal.setNodeFactory(new JsonNodeFactory(true));
retVal = retVal.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS);
retVal = retVal.enable(DeserializationFeature.FAIL_ON_TRAILING_TOKENS);
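The rewritten createObjectMapper() builds the mapper through JsonMapper.builder() while keeping the existing node factory and deserialization features; USE_BIG_DECIMAL_FOR_FLOATS is what preserves decimal precision during parsing. A small, self-contained Jackson illustration of that behaviour (standalone example, not HAPI code):

static void bigDecimalDemo() throws IOException {
	ObjectMapper mapper = JsonMapper.builder()
			.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS)
			.build();
	JsonNode node = mapper.readTree("{\"value\": 0.000000001}");
	// With the feature enabled the literal is parsed into a BigDecimal-backed node, so precision is not lost.
	System.out.println(node.get("value").isBigDecimal());   // true
	System.out.println(node.get("value").decimalValue());   // the exact BigDecimal value
}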
@@ -83,7 +83,6 @@ import static org.apache.commons.lang3.StringUtils.substring;
public class FhirTerser {

private static final Pattern COMPARTMENT_MATCHER_PATH = Pattern.compile("([a-zA-Z.]+)\\.where\\(resolve\\(\\) is ([a-zA-Z]+)\\)");
private static final EnumSet<OptionsEnum> EMPTY_OPTION_SET = EnumSet.noneOf(OptionsEnum.class);
private static final String USER_DATA_KEY_CONTAIN_RESOURCES_COMPLETED = FhirTerser.class.getName() + "_CONTAIN_RESOURCES_COMPLETED";
private final FhirContext myContext;

@@ -1112,7 +1111,7 @@ public class FhirTerser {
});
}

private void containResourcesForEncoding(ContainedResources theContained, IBaseResource theResource, EnumSet<OptionsEnum> theOptions) {
private void containResourcesForEncoding(ContainedResources theContained, IBaseResource theResource, boolean theModifyResource) {
List<IBaseReference> allReferences = getAllPopulatedChildElementsOfType(theResource, IBaseReference.class);
for (IBaseReference next : allReferences) {
IBaseResource resource = next.getResource();

@@ -1121,7 +1120,7 @@ public class FhirTerser {
IBaseResource potentialTarget = theContained.getExistingIdToContainedResource().remove(next.getReferenceElement().getValue());
if (potentialTarget != null) {
theContained.addContained(next.getReferenceElement(), potentialTarget);
containResourcesForEncoding(theContained, potentialTarget, theOptions);
containResourcesForEncoding(theContained, potentialTarget, theModifyResource);
}
}
}

@@ -1136,15 +1135,13 @@ public class FhirTerser {
continue;
}
IIdType id = theContained.addContained(resource);
if (theOptions.contains(OptionsEnum.MODIFY_RESOURCE)) {
if (theModifyResource) {
getContainedResourceList(theResource).add(resource);
next.setReference(id.getValue());
}
if (resource.getIdElement().isLocal() && theContained.hasExistingIdToContainedResource()) {
theContained.getExistingIdToContainedResource().remove(resource.getIdElement().getValue());
}
} else {
continue;
}
}

@@ -1161,9 +1158,20 @@ public class FhirTerser {
* @since 5.4.0
*/
public ContainedResources containResources(IBaseResource theResource, OptionsEnum... theOptions) {
EnumSet<OptionsEnum> options = toOptionSet(theOptions);
boolean storeAndReuse = false;
boolean modifyResource = false;
for (OptionsEnum next : theOptions) {
switch (next) {
case MODIFY_RESOURCE:
modifyResource = true;
break;
case STORE_AND_REUSE_RESULTS:
storeAndReuse = true;
break;
}
}

if (options.contains(OptionsEnum.STORE_AND_REUSE_RESULTS)) {
if (storeAndReuse) {
Object cachedValue = theResource.getUserData(USER_DATA_KEY_CONTAIN_RESOURCES_COMPLETED);
if (cachedValue != null) {
return (ContainedResources) cachedValue;

@@ -1184,25 +1192,17 @@ public class FhirTerser {
contained.addContained(next);
}

containResourcesForEncoding(contained, theResource, options);
if (myContext.getParserOptions().isAutoContainReferenceTargetsWithNoId()) {
containResourcesForEncoding(contained, theResource, modifyResource);
}

if (options.contains(OptionsEnum.STORE_AND_REUSE_RESULTS)) {
if (storeAndReuse) {
theResource.setUserData(USER_DATA_KEY_CONTAIN_RESOURCES_COMPLETED, contained);
}

return contained;
}

private EnumSet<OptionsEnum> toOptionSet(OptionsEnum[] theOptions) {
EnumSet<OptionsEnum> options;
if (theOptions == null || theOptions.length == 0) {
options = EMPTY_OPTION_SET;
} else {
options = EnumSet.of(theOptions[0], theOptions);
}
return options;
}

@SuppressWarnings("unchecked")
private <T extends IBaseResource> List<T> getContainedResourceList(T theResource) {
List<T> containedResources = Collections.emptyList();

@@ -1404,14 +1404,13 @@ public class FhirTerser {

/**
* Clones a resource object, copying all data elements from theSource into a new copy of the same type.
*
* <p>
* Note that:
* <ul>
* <li>Only FHIR data elements are copied (i.e. user data maps are not copied)</li>
* <li>If a class extending a HAPI FHIR type (e.g. an instance of a class extending the Patient class) is supplied, an instance of the base type will be returned.</li>
* </ul>
*
*
* @param theSource The source resource
* @return A copy of the source resource
* @since 5.6.0
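The refactor keeps the public varargs signature of containResources(...) unchanged, so existing callers are unaffected. A sketch of a typical call, assuming the nested FhirTerser.OptionsEnum and ContainedResources types shown in the hunks above:

FhirTerser terser = ctx.newTerser();
// Contain referenced resources in place and cache the result on the resource for reuse.
FhirTerser.ContainedResources contained = terser.containResources(resource,
		FhirTerser.OptionsEnum.MODIFY_RESOURCE,
		FhirTerser.OptionsEnum.STORE_AND_REUSE_RESULTS);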
@@ -0,0 +1,7 @@
---
type: perf
issue: 3197
title: "A new configuration option has been added to ParserOptions called `setAutoContainReferenceTargetsWithNoId`. This
setting disables the automatic creation of contained resources in cases where the target/contained resource is set
to a Reference instance but not actually added to the `Resource.contained` list. Disabling this automatic containing
yields a minor performance improvement in systems that do not need this functionality."
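A minimal sketch of opting out of the behaviour this changelog describes, using the new option on the context's ParserOptions:

FhirContext ctx = FhirContext.forR4();
// Only resources explicitly added to Resource.contained will be serialized as contained resources.
ctx.getParserOptions().setAutoContainReferenceTargetsWithNoId(false);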
@@ -0,0 +1,7 @@
---
type: perf
issue: 3197
title: "A number of minor optimizations have been added to the JsonParser serializer module as well as to the
transaction processor. These optimizations lead to a significant performance improvement when processing
large transaction bundles (i.e. transaction bundles containing a larger number of entries)."
@@ -117,8 +117,8 @@ public class TransactionProcessor extends BaseTransactionProcessor {
}

@Override
protected Map<IBase, IIdType> doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IIdType> theAllIds,
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder, List<IBase> theEntries, StopWatch theTransactionStopWatch) {
protected EntriesToProcessMap doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IIdType> theAllIds,
IdSubstitutionMap theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder, List<IBase> theEntries, StopWatch theTransactionStopWatch) {

ITransactionProcessorVersionAdapter versionAdapter = getVersionAdapter();
RequestPartitionId requestPartitionId = null;
@@ -30,7 +30,13 @@ import javax.persistence.EntityManagerFactory;
public class TestJPAConfig {
@Bean
public DaoConfig daoConfig() {
return new DaoConfig();
DaoConfig retVal = new DaoConfig();

if ("true".equals(System.getProperty("mass_ingestion_mode"))) {
retVal.setMassIngestionMode(true);
}

return retVal;
}

@Bean
@@ -53,6 +53,9 @@ public class TestR4Config extends BaseJavaConfigR4 {
if ("true".equals(System.getProperty("single_db_connection"))) {
ourMaxThreads = 1;
}
if ("true".equals(System.getProperty("unlimited_db_connection"))) {
ourMaxThreads = 100;
}
}
}
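Both test-config changes key off plain JVM system properties, so a performance run can flip them before the Spring context starts; the new SyntheaPerfTest later in this commit does exactly this in its @BeforeAll:

// Set before the test application context is created:
System.setProperty("mass_ingestion_mode", "true");       // TestJPAConfig -> DaoConfig.setMassIngestionMode(true)
System.setProperty("unlimited_db_connection", "true");   // TestR4Config -> ourMaxThreads = 100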
@ -39,29 +39,27 @@ import org.hl7.fhir.r4.model.ServiceRequest;
|
|||
import org.hl7.fhir.r4.model.StringType;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.MethodOrderer;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.TestMethodOrder;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.StringTokenizer;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.in;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@TestMethodOrder(MethodOrderer.MethodName.class)
|
||||
public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4QueryCountTest.class);
|
||||
|
||||
|
@ -70,7 +68,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myDaoConfig.setResourceMetaCountHardLimit(new DaoConfig().getResourceMetaCountHardLimit());
|
||||
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
|
||||
myDaoConfig.setDeleteEnabled(new DaoConfig().isDeleteEnabled());
|
||||
myDaoConfig.setMatchUrlCache(new DaoConfig().getMatchUrlCache());
|
||||
myDaoConfig.setMatchUrlCacheEnabled(new DaoConfig().isMatchUrlCacheEnabled());
|
||||
myDaoConfig.setHistoryCountMode(DaoConfig.DEFAULT_HISTORY_COUNT_MODE);
|
||||
myDaoConfig.setMassIngestionMode(new DaoConfig().isMassIngestionMode());
|
||||
myModelConfig.setAutoVersionReferenceAtPaths(new ModelConfig().getAutoVersionReferenceAtPaths());
|
||||
|
@ -870,6 +868,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
Practitioner pract = new Practitioner();
|
||||
pract.addIdentifier().setSystem("foo").setValue("bar");
|
||||
myPractitionerDao.create(pract);
|
||||
runInTransaction(() -> assertEquals(1, myResourceTableDao.count(), () -> myResourceTableDao.findAll().stream().map(t -> t.getIdDt().toUnqualifiedVersionless().getValue()).collect(Collectors.joining(","))));
|
||||
|
||||
// First pass
|
||||
|
||||
|
@ -881,7 +880,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
runInTransaction(()->assertEquals(4, myResourceTableDao.count()));
|
||||
runInTransaction(() -> assertEquals(4, myResourceTableDao.count(), () -> myResourceTableDao.findAll().stream().map(t -> t.getIdDt().toUnqualifiedVersionless().getValue()).collect(Collectors.joining(","))));
|
||||
|
||||
// Run it again - This time even the match URL should be cached
|
||||
|
||||
|
@ -923,9 +922,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
pt2.addIdentifier().setSystem("http://foo").setValue("456");
|
||||
bb.addTransactionCreateEntry(pt2);
|
||||
|
||||
runInTransaction(() -> {
|
||||
assertEquals(0, myResourceTableDao.count());
|
||||
});
|
||||
runInTransaction(() -> assertEquals(0, myResourceTableDao.count()));
|
||||
|
||||
ourLog.info("About to start transaction");
|
||||
|
||||
|
@ -940,9 +937,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
runInTransaction(() -> {
|
||||
assertEquals(2, myResourceTableDao.count());
|
||||
});
|
||||
runInTransaction(() -> assertEquals(2, myResourceTableDao.count()));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -1299,7 +1294,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
* Third time with mass ingestion mode enabled
|
||||
*/
|
||||
myDaoConfig.setMassIngestionMode(true);
|
||||
myDaoConfig.setMatchUrlCache(true);
|
||||
myDaoConfig.setMatchUrlCacheEnabled(true);
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
outcome = mySystemDao.transaction(mySrd, input.get());
|
||||
|
@ -1331,7 +1326,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
|
||||
@Test
|
||||
public void testTransactionWithConditionalCreate_MatchUrlCacheEnabled() {
|
||||
myDaoConfig.setMatchUrlCache(true);
|
||||
myDaoConfig.setMatchUrlCacheEnabled(true);
|
||||
|
||||
Supplier<Bundle> bundleCreator = () -> {
|
||||
BundleBuilder bb = new BundleBuilder(myFhirCtx);
|
||||
|
@ -2155,7 +2150,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
@Test
|
||||
public void testMassIngestionMode_TransactionWithChanges() {
|
||||
myDaoConfig.setDeleteEnabled(false);
|
||||
myDaoConfig.setMatchUrlCache(true);
|
||||
myDaoConfig.setMatchUrlCacheEnabled(true);
|
||||
myDaoConfig.setMassIngestionMode(true);
|
||||
myFhirCtx.getParserOptions().setStripVersionsFromReferences(false);
|
||||
myModelConfig.setRespectVersionsForSearchIncludes(true);
|
||||
|
@ -2219,7 +2214,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
@Test
|
||||
public void testMassIngestionMode_TransactionWithChanges_2() throws IOException {
|
||||
myDaoConfig.setDeleteEnabled(false);
|
||||
myDaoConfig.setMatchUrlCache(true);
|
||||
myDaoConfig.setMatchUrlCacheEnabled(true);
|
||||
myDaoConfig.setMassIngestionMode(true);
|
||||
myFhirCtx.getParserOptions().setStripVersionsFromReferences(false);
|
||||
myModelConfig.setRespectVersionsForSearchIncludes(true);
|
||||
|
|
|
@ -55,9 +55,7 @@ import ca.uhn.fhir.rest.param.UriParam;
|
|||
import ca.uhn.fhir.rest.param.UriParamQualifierEnum;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
|
||||
import ca.uhn.fhir.util.CollectionUtil;
|
||||
import ca.uhn.fhir.util.HapiExtensions;
|
||||
import ca.uhn.fhir.util.ResourceUtil;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
@ -133,11 +131,6 @@ import org.junit.jupiter.api.BeforeEach;
|
|||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.DisplayName;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.Arguments;
|
||||
import org.junit.jupiter.params.provider.CsvFileSource;
|
||||
import org.junit.jupiter.params.provider.CsvSource;
|
||||
import org.junit.jupiter.params.provider.MethodSource;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.ArgumentMatchers;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
@ -149,12 +142,9 @@ import org.springframework.transaction.support.TransactionCallbackWithoutResult;
|
|||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.math.BigDecimal;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
|
@ -162,7 +152,6 @@ import java.util.List;
|
|||
import java.util.Set;
|
||||
import java.util.TreeSet;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static ca.uhn.fhir.rest.api.Constants.PARAM_TYPE;
|
||||
import static org.apache.commons.lang3.StringUtils.countMatches;
|
||||
|
@ -196,11 +185,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
@Autowired
|
||||
MatchUrlService myMatchUrlService;
|
||||
|
||||
@BeforeAll
|
||||
public static void beforeAllTest(){
|
||||
System.setProperty("user.timezone", "EST");
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
public void afterResetSearchSize() {
|
||||
myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
|
||||
|
@ -209,6 +193,9 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
|
||||
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
|
||||
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
|
||||
|
||||
myModelConfig.setAutoSupportDefaultSearchParams(true);
|
||||
mySearchParamRegistry.resetForUnitTest();
|
||||
}
|
||||
|
||||
@BeforeEach
|
||||
|
@ -217,6 +204,35 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDisableAutoSupportDefaultSearchParams() {
|
||||
myModelConfig.setAutoSupportDefaultSearchParams(false);
|
||||
mySearchParamRegistry.resetForUnitTest();
|
||||
|
||||
Patient patient = new Patient();
|
||||
patient.setActive(true);
|
||||
patient.addName().setFamily("FAMILY");
|
||||
myPatientDao.create(patient);
|
||||
|
||||
runInTransaction(() -> {
|
||||
assertEquals(0, myResourceIndexedSearchParamStringDao.count());
|
||||
assertEquals(0, myResourceIndexedSearchParamTokenDao.count());
|
||||
});
|
||||
|
||||
SearchParameterMap map = SearchParameterMap.newSynchronous("name", new StringParam("FAMILY"));
|
||||
try {
|
||||
myPatientDao.search(map, mySrd);
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
// good
|
||||
}
|
||||
|
||||
// Make sure we can support mandatory SPs
|
||||
map = SearchParameterMap.newSynchronous("url", new UriParam("http://foo"));
|
||||
myCodeSystemDao.search(map, mySrd); // should not fail
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSearchInExistingTransaction() {
|
||||
createPatient(withBirthdate("2021-01-01"));
|
||||
|
@ -237,8 +253,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
public void testCanonicalReference() {
|
||||
StructureDefinition sd = new StructureDefinition();
|
||||
|
@ -665,7 +679,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* See #1053
|
||||
*/
|
||||
|
@ -1061,7 +1074,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testHasParameterChained() {
|
||||
IIdType pid0;
|
||||
|
@ -1574,7 +1586,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testPeriodWithNoStart() {
|
||||
ServiceRequest serviceRequest = new ServiceRequest();
|
||||
|
@ -1611,7 +1622,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
});
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchByIdParamInverse() {
|
||||
String id1;
|
||||
|
@ -1641,7 +1651,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchByIdParam_QueryIsMinimal() {
|
||||
// With only an _id parameter
|
||||
|
@ -1919,7 +1928,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchWithIncludeStarQualified() {
|
||||
|
||||
|
@ -1984,7 +1992,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
assertThat(ids, containsInAnyOrder(obsId, encId));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testComponentQuantity() {
|
||||
Observation o1 = new Observation();
|
||||
|
@ -2061,7 +2068,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchDate_TimingValueUsingPeriod() {
|
||||
ServiceRequest p1 = new ServiceRequest();
|
||||
|
@ -2081,7 +2087,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchDateWrongParam() {
|
||||
Patient p1 = new Patient();
|
||||
|
@ -2105,7 +2110,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDateRangeOnPeriod_SearchByDateTime_NoUpperBound() {
|
||||
Encounter enc = new Encounter();
|
||||
|
@ -2153,7 +2157,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
assertThat(ids, empty());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDateRangeOnPeriod_SearchByDate_NoUpperBound() {
|
||||
Encounter enc = new Encounter();
|
||||
|
@ -2201,7 +2204,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
assertThat(ids, empty());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDateRangeOnPeriod_SearchByDateTime_NoLowerBound() {
|
||||
Encounter enc = new Encounter();
|
||||
|
@ -2249,7 +2251,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
assertThat(ids, contains(id1));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDateRangeOnPeriod_SearchByDate_NoLowerBound() {
|
||||
Encounter enc = new Encounter();
|
||||
|
@ -2297,7 +2298,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
assertThat(ids, contains(id1));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDatePeriodParamEndOnly() {
|
||||
{
|
||||
|
@ -2436,7 +2436,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* See #1174
|
||||
*/
|
||||
|
@ -2546,7 +2545,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchLastUpdatedParam() {
|
||||
String methodName = "testSearchLastUpdatedParam";
|
||||
|
@ -2861,7 +2859,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchNumberParam() {
|
||||
RiskAssessment e1 = new RiskAssessment();
|
||||
|
@ -2985,7 +2982,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchParamChangesType() {
|
||||
String name = "testSearchParamChangesType";
|
||||
|
@ -3133,7 +3129,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchResourceLinkWithChain() {
|
||||
Patient patient = new Patient();
|
||||
|
@ -3417,7 +3412,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchTokenListWithMixedCombinations() {
|
||||
|
||||
|
@ -3665,7 +3659,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
assertEquals(5, countMatches(searchQuery.toUpperCase(), "LEFT OUTER JOIN"), searchQuery);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchWithDateRange() {
|
||||
|
||||
|
@ -3792,7 +3785,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchDoubleToken() {
|
||||
Patient patient = new Patient();
|
||||
|
@ -3829,7 +3821,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchDoubleString() {
|
||||
Patient patient = new Patient();
|
||||
|
@ -4297,7 +4288,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchReferenceUntyped() {
|
||||
Patient p = new Patient();
|
||||
|
@ -4334,7 +4324,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
assertEquals("This search uses an unqualified resource(a parameter in a chain without a resource type). This is less efficient than using a qualified type. If you know what you're looking for, try qualifying it using the form: 'entity:[resourceType]'", message.getMessage());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchWithDateAndReusesExistingJoin() {
|
||||
// Add a search parameter to Observation.issued, so that between that one
|
||||
|
@ -4434,7 +4423,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchWithFetchSizeDefaultMaximum() {
|
||||
myDaoConfig.setFetchSizeDefaultMaximum(5);
|
||||
|
@ -5189,7 +5177,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDateSearchParametersShouldBeTimezoneIndependent() {
|
||||
|
||||
|
@ -5316,7 +5303,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
));
|
||||
}
|
||||
|
||||
|
||||
private void createObservationWithEffective(String theId, String theEffective) {
|
||||
Observation obs = new Observation();
|
||||
obs.setId(theId);
|
||||
|
@ -5404,7 +5390,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
assertThat(toUnqualifiedVersionlessIdValues(outcome), contains(crId));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchWithTwoChainedDates() {
|
||||
// Matches
|
||||
|
@ -5457,7 +5442,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testCircularReferencesDontBreakRevIncludes() {
|
||||
|
||||
|
@ -5495,7 +5479,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
private String toStringMultiline(List<?> theResults) {
|
||||
StringBuilder b = new StringBuilder();
|
||||
for (Object next : theResults) {
|
||||
|
@ -5505,5 +5488,10 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
return b.toString();
|
||||
}
|
||||
|
||||
@BeforeAll
|
||||
public static void beforeAllTest() {
|
||||
System.setProperty("user.timezone", "EST");
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@@ -136,6 +136,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
myDaoConfig.setBundleBatchMaxPoolSize(new DaoConfig().getBundleBatchMaxPoolSize());
myDaoConfig.setAutoCreatePlaceholderReferenceTargets(new DaoConfig().isAutoCreatePlaceholderReferenceTargets());
myModelConfig.setAutoVersionReferenceAtPaths(new ModelConfig().getAutoVersionReferenceAtPaths());
myFhirCtx.getParserOptions().setAutoContainReferenceTargetsWithNoId(true);
}

@BeforeEach

@@ -1043,6 +1044,25 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
assertThat(task.getBasedOn().get(0).getReference(), matchesPattern("Patient/[0-9]+"));
}

@Test
public void testTransactionNoContainedRedux_ContainedProcessingDisabled() throws IOException {
myFhirCtx.getParserOptions().setAutoContainReferenceTargetsWithNoId(false);

//Pre-create the patient, which will cause the ifNoneExist to prevent a new creation during bundle transaction
Patient patient = loadResourceFromClasspath(Patient.class, "/r4/preexisting-patient.json");
myPatientDao.create(patient);

//Post the Bundle containing a conditional POST with an identical patient from the above resource.
Bundle request = loadResourceFromClasspath(Bundle.class, "/r4/transaction-no-contained-2.json");

Bundle outcome = mySystemDao.transaction(mySrd, request);

IdType taskId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
Task task = myTaskDao.read(taskId, mySrd);

assertThat(task.getBasedOn().get(0).getReference(), matchesPattern("Patient/[0-9]+"));
}

@Test
public void testTransactionNoContained() throws IOException {
@ -0,0 +1,203 @@
|
|||
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.config.TestR4Config;
|
||||
import ca.uhn.fhir.jpa.config.TestR4WithLuceneDisabledConfig;
|
||||
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
|
||||
import ca.uhn.fhir.jpa.dao.BaseTransactionProcessor;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
||||
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
|
||||
import ca.uhn.fhir.jpa.search.reindex.BlockPolicy;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import org.apache.commons.compress.utils.Lists;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.Meta;
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.FileSystems;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ContextConfiguration(classes = {TestR4WithLuceneDisabledConfig.class})
|
||||
@DirtiesContext
|
||||
public class SyntheaPerfTest extends BaseJpaTest {
|
||||
|
||||
@BeforeAll
|
||||
public static void beforeAll() {
|
||||
System.setProperty("unlimited_db_connection", "true");
|
||||
System.setProperty("mass_ingestion_mode", "true");
|
||||
}
|
||||
|
||||
@AfterAll
|
||||
public static void afterAll() {
|
||||
System.clearProperty("unlimited_db_connection");
|
||||
System.clearProperty("mass_ingestion_mode");
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
public void afterEach() {
|
||||
myCtx.getParserOptions().setAutoContainReferenceTargetsWithNoId(true);
|
||||
}
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(SyntheaPerfTest.class);
|
||||
private static final FhirContext ourCtx = FhirContext.forR4Cached();
|
||||
public static final String PATH_TO_SYNTHEA_OUTPUT = "../../synthea/output/fhir/";
|
||||
public static final int CONCURRENCY = 4;
|
||||
@Autowired
|
||||
private FhirContext myCtx;
|
||||
@Autowired
|
||||
private PlatformTransactionManager myTxManager;
|
||||
@Autowired
|
||||
private IFhirSystemDao<Bundle, Meta> mySystemDao;
|
||||
|
||||
@Disabled
|
||||
@Test
|
||||
public void testLoadSynthea() throws Exception {
|
||||
assertEquals(100, TestR4Config.getMaxThreads());
|
||||
|
||||
myDaoConfig.setResourceEncoding(ResourceEncodingEnum.JSON);
|
||||
myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);
|
||||
myDaoConfig.setMatchUrlCacheEnabled(true);
|
||||
myDaoConfig.setDeleteEnabled(false);
|
||||
myCtx.getParserOptions().setAutoContainReferenceTargetsWithNoId(false);
|
||||
|
||||
assertTrue(myDaoConfig.isMassIngestionMode());
|
||||
|
||||
List<Path> files = Files
|
||||
.list(FileSystems.getDefault().getPath(PATH_TO_SYNTHEA_OUTPUT))
|
||||
.filter(t->t.toString().endsWith(".json"))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
List<Path> meta = files.stream().filter(t -> t.toString().contains("hospital") || t.toString().contains("practitioner")).collect(Collectors.toList());
|
||||
new Uploader(meta);
|
||||
|
||||
List<Path> nonMeta = files.stream().filter(t -> !t.toString().contains("hospital") && !t.toString().contains("practitioner")).collect(Collectors.toList());
|
||||
|
||||
// new Uploader(Collections.singletonList(nonMeta.remove(0)));
|
||||
// new Uploader(Collections.singletonList(nonMeta.remove(0)));
|
||||
// new Uploader(Collections.singletonList(nonMeta.remove(0)));
|
||||
new Uploader(Collections.singletonList(nonMeta.remove(0)));
|
||||
|
||||
new Uploader(nonMeta);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected FhirContext getContext() {
|
||||
return myCtx;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PlatformTransactionManager getTxManager() {
|
||||
return myTxManager;
|
||||
}
|
||||
|
||||
|
||||
private class Uploader {
|
||||
|
||||
private final ThreadPoolTaskExecutor myExecutor;
|
||||
private final int myTotal;
|
||||
private final StopWatch mySw;
|
||||
private final AtomicInteger myFilesCounter = new AtomicInteger(0);
|
||||
private final AtomicInteger myResourcesCounter = new AtomicInteger(0);
|
||||
|
||||
public Uploader(List<Path> thePaths) throws ExecutionException, InterruptedException {
|
||||
Validate.isTrue(thePaths.size() > 0);
|
||||
myTotal = thePaths.size();
|
||||
|
||||
myExecutor = new ThreadPoolTaskExecutor();
|
||||
myExecutor.setCorePoolSize(0);
|
||||
myExecutor.setMaxPoolSize(CONCURRENCY);
|
||||
myExecutor.setQueueCapacity(100);
|
||||
myExecutor.setAllowCoreThreadTimeOut(true);
|
||||
myExecutor.setThreadNamePrefix("Uploader-");
|
||||
myExecutor.setRejectedExecutionHandler(new BlockPolicy());
|
||||
myExecutor.initialize();
|
||||
|
||||
mySw = new StopWatch();
|
||||
List<Future<?>> futures = new ArrayList<>();
|
||||
for (Path next : thePaths) {
|
||||
futures.add(myExecutor.submit(new MyTask(next)));
|
||||
}
|
||||
|
||||
for (Future<?> next : futures) {
|
||||
next.get();
|
||||
}
|
||||
|
||||
ourLog.info("Finished uploading {} files with {} resources in {} - {} files/sec - {} res/sec",
|
||||
myFilesCounter.get(),
|
||||
myResourcesCounter.get(),
|
||||
mySw,
|
||||
mySw.formatThroughput(myFilesCounter.get(), TimeUnit.SECONDS),
|
||||
mySw.formatThroughput(myResourcesCounter.get(), TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
private class MyTask implements Runnable {
|
||||
|
||||
private final Path myPath;
|
||||
|
||||
public MyTask(Path thePath) {
|
||||
myPath = thePath;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
Bundle bundle;
|
||||
try (FileReader reader = new FileReader(myPath.toFile())) {
|
||||
bundle = ourCtx.newJsonParser().parseResource(Bundle.class, reader);
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
|
||||
mySystemDao.transaction(new SystemRequestDetails(myInterceptorRegistry), bundle);
|
||||
|
||||
int fileCount = myFilesCounter.incrementAndGet();
|
||||
myResourcesCounter.addAndGet(bundle.getEntry().size());
|
||||
|
||||
if (fileCount % 10 == 0) {
|
||||
ourLog.info("Have uploaded {} files with {} resources in {} - {} files/sec - {} res/sec",
|
||||
myFilesCounter.get(),
|
||||
myResourcesCounter.get(),
|
||||
mySw,
|
||||
mySw.formatThroughput(myFilesCounter.get(), TimeUnit.SECONDS),
|
||||
mySw.formatThroughput(myResourcesCounter.get(), TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.mdm.broker;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Master Data Management
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.subscription.api.ISubscriptionMessageKeySvc;
|
||||
import ca.uhn.fhir.mdm.model.CanonicalEID;
|
||||
import ca.uhn.fhir.mdm.util.EIDHelper;
|
||||
|
|
|
@ -34,7 +34,6 @@ import javax.persistence.TemporalType;
|
|||
import javax.persistence.Transient;
|
||||
import java.util.Collection;
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.defaultString;
|
||||
|
||||
|
@ -113,12 +112,16 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase
|
|||
@Override
|
||||
public InstantDt getPublished() {
|
||||
if (myPublished != null) {
|
||||
return new InstantDt(cloneDate(myPublished));
|
||||
return new InstantDt(getPublishedDate());
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public Date getPublishedDate() {
|
||||
return cloneDate(myPublished);
|
||||
}
|
||||
|
||||
public void setPublished(Date thePublished) {
|
||||
myPublished = thePublished;
|
||||
}
|
||||
|
@ -137,7 +140,12 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase
|
|||
|
||||
@Override
|
||||
public InstantDt getUpdated() {
|
||||
return new InstantDt(cloneDate(myUpdated));
|
||||
return new InstantDt(getUpdatedDate());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Date getUpdatedDate() {
|
||||
return cloneDate(myUpdated);
|
||||
}
|
||||
|
||||
public void setUpdated(Date theUpdated) {
|
||||
|
@ -148,11 +156,6 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase
|
|||
myUpdated = theUpdated.getValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Date getUpdatedDate() {
|
||||
return cloneDate(myUpdated);
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract long getVersion();
|
||||
|
||||
|
|
|
@ -28,8 +28,6 @@ import org.hl7.fhir.dstu2.model.Subscription;
|
|||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
import org.hl7.fhir.r4.model.DateTimeType;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.PostConstruct;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
|
@ -102,6 +100,7 @@ public class ModelConfig {
|
|||
private boolean myIndexOnContainedResources = false;
|
||||
private boolean myIndexOnContainedResourcesRecursively = false;
|
||||
private boolean myAllowMdmExpansion = false;
|
||||
private boolean myAutoSupportDefaultSearchParams = true;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
|
@ -162,36 +161,6 @@ public class ModelConfig {
|
|||
return myAllowContainsSearches;
|
||||
}
|
||||
|
||||
/**
|
||||
* If enabled, the server will support the use of :mdm search parameter qualifier on Reference Search Parameters.
|
||||
* This Parameter Qualifier is HAPI-specific, and not defined anywhere in the FHIR specification. Using this qualifier
|
||||
* will result in an MDM expansion being done on the reference, which will expand the search scope. For example, if Patient/1
|
||||
* is MDM-matched to Patient/2 and you execute the search:
|
||||
* Observation?subject:mdm=Patient/1 , you will receive observations for both Patient/1 and Patient/2.
|
||||
* <p>
|
||||
* Default is <code>false</code>
|
||||
* </p>
|
||||
* @since 5.4.0
|
||||
*/
|
||||
public boolean isAllowMdmExpansion() {
|
||||
return myAllowMdmExpansion;
|
||||
}
|
||||
|
||||
/**
|
||||
* If enabled, the server will support the use of :mdm search parameter qualifier on Reference Search Parameters.
|
||||
* This Parameter Qualifier is HAPI-specific, and not defined anywhere in the FHIR specification. Using this qualifier
|
||||
* will result in an MDM expansion being done on the reference, which will expand the search scope. For example, if Patient/1
|
||||
* is MDM-matched to Patient/2 and you execute the search:
|
||||
* Observation?subject:mdm=Patient/1 , you will receive observations for both Patient/1 and Patient/2.
|
||||
* <p>
|
||||
* Default is <code>false</code>
|
||||
* </p>
|
||||
* @since 5.4.0
|
||||
*/
|
||||
public void setAllowMdmExpansion(boolean theAllowMdmExpansion) {
|
||||
myAllowMdmExpansion = theAllowMdmExpansion;
|
||||
}
|
||||
|
||||
/**
|
||||
* If enabled, the server will support the use of :contains searches,
|
||||
* which are helpful but can have adverse effects on performance.
|
||||
|
@ -210,6 +179,38 @@ public class ModelConfig {
|
|||
this.myAllowContainsSearches = theAllowContainsSearches;
|
||||
}
|
||||
|
||||
/**
|
||||
* If enabled, the server will support the use of :mdm search parameter qualifier on Reference Search Parameters.
|
||||
* This Parameter Qualifier is HAPI-specific, and not defined anywhere in the FHIR specification. Using this qualifier
|
||||
* will result in an MDM expansion being done on the reference, which will expand the search scope. For example, if Patient/1
|
||||
* is MDM-matched to Patient/2 and you execute the search:
|
||||
* Observation?subject:mdm=Patient/1, you will receive observations for both Patient/1 and Patient/2.
|
||||
* <p>
|
||||
* Default is <code>false</code>
|
||||
* </p>
|
||||
*
|
||||
* @since 5.4.0
|
||||
*/
|
||||
public boolean isAllowMdmExpansion() {
|
||||
return myAllowMdmExpansion;
|
||||
}
|
||||
|
||||
/**
|
||||
* If enabled, the server will support the use of :mdm search parameter qualifier on Reference Search Parameters.
|
||||
* This Parameter Qualifier is HAPI-specific, and not defined anywhere in the FHIR specification. Using this qualifier
|
||||
* will result in an MDM expansion being done on the reference, which will expand the search scope. For example, if Patient/1
|
||||
* is MDM-matched to Patient/2 and you execute the search:
|
||||
* Observation?subject:mdm=Patient/1, you will receive observations for both Patient/1 and Patient/2.
|
||||
* <p>
|
||||
* Default is <code>false</code>
|
||||
* </p>
|
||||
*
|
||||
* @since 5.4.0
|
||||
*/
|
||||
public void setAllowMdmExpansion(boolean theAllowMdmExpansion) {
|
||||
myAllowMdmExpansion = theAllowMdmExpansion;
|
||||
}
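To make the behaviour documented above concrete, here is a minimal client-side sketch. It is illustrative only and not part of this change: it assumes a server whose ModelConfig has setAllowMdmExpansion(true), an existing MDM link between Patient/1 and Patient/2, and a placeholder base URL.

// Illustrative sketch only (not part of this diff). Assumes the server has
// ModelConfig#setAllowMdmExpansion(true) and an MDM link between Patient/1 and Patient/2;
// the base URL is a placeholder.
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;

public class MdmExpansionSearchExample {

	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		// The :mdm qualifier expands the reference across MDM-matched patients, so this
		// search returns Observations for both Patient/1 and Patient/2.
		Bundle results = client.search()
			.byUrl("Observation?subject:mdm=Patient/1")
			.returnBundle(Bundle.class)
			.execute();

		System.out.println("Matches: " + results.getEntry().size());
	}
}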
|
||||
|
||||
/**
|
||||
* If set to <code>true</code> (default is <code>false</code>) the server will allow
|
||||
* resources to have references to external servers. For example if this server is
|
||||
|
@ -385,7 +386,6 @@ public class ModelConfig {
|
|||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* This setting indicates which subscription channel types are supported by the server. Any subscriptions submitted
|
||||
* to the server matching these types will be activated.
|
||||
|
@ -806,6 +806,38 @@ public class ModelConfig {
|
|||
myIndexOnContainedResourcesRecursively = theIndexOnContainedResourcesRecursively;
|
||||
}
|
||||
|
||||
/**
 * If this is disabled by setting it to {@literal false} (default is {@literal true}),
 * the server will not automatically implement and support search parameters that
 * are not explicitly created in the repository.
 * <p>
 * Disabling this can dramatically improve performance (especially write performance)
 * in servers that only need to support a small number of search parameters, or no search parameters at all.
 * Note, however, that disabling this also reduces the options available for searching.
 * </p>
 *
 * @since 6.0.0
 */
|
||||
public boolean isAutoSupportDefaultSearchParams() {
|
||||
return myAutoSupportDefaultSearchParams;
|
||||
}
|
||||
|
||||
/**
 * If this is disabled by setting it to {@literal false} (default is {@literal true}),
 * the server will not automatically implement and support search parameters that
 * are not explicitly created in the repository.
 * <p>
 * Disabling this can dramatically improve performance (especially write performance)
 * in servers that only need to support a small number of search parameters, or no search parameters at all.
 * Note, however, that disabling this also reduces the options available for searching.
 * </p>
 *
 * @since 6.0.0
 */
|
||||
public void setAutoSupportDefaultSearchParams(boolean theAutoSupportDefaultSearchParams) {
|
||||
myAutoSupportDefaultSearchParams = theAutoSupportDefaultSearchParams;
|
||||
}
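As a rough configuration sketch of the trade-off documented above (illustrative only; how the ModelConfig bean is wired into a server is outside the scope of this diff):

// Illustrative sketch only (not part of this diff).
import ca.uhn.fhir.jpa.model.entity.ModelConfig;

public class DisableDefaultSearchParamsExample {

	public static ModelConfig leanModelConfig() {
		ModelConfig modelConfig = new ModelConfig();
		// Skip automatic support for the built-in search parameters; only parameters explicitly
		// created in the repository (plus the non-disableable ones such as SearchParameter:* and
		// Subscription:*) remain active, which can significantly speed up writes.
		modelConfig.setAutoSupportDefaultSearchParams(false);
		return modelConfig;
	}
}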
|
||||
|
||||
private static void validateTreatBaseUrlsAsLocal(String theUrl) {
|
||||
Validate.notBlank(theUrl, "Base URL must not be null or empty");
|
||||
|
||||
|
|
|
@ -638,8 +638,8 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
|
|||
retVal.setVersion(myVersion);
|
||||
retVal.setTransientForcedId(getTransientForcedId());
|
||||
|
||||
retVal.setPublished(getPublished());
|
||||
retVal.setUpdated(getUpdated());
|
||||
retVal.setPublished(getPublishedDate());
|
||||
retVal.setUpdated(getUpdatedDate());
|
||||
retVal.setFhirVersion(getFhirVersion());
|
||||
retVal.setDeleted(getDeleted());
|
||||
retVal.setResourceTable(this);
|
||||
|
|
|
@ -991,7 +991,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
myModelConfig.isIndexOnContainedResources()
|
||||
|| anySearchParameterUsesResolve(searchParams, theSearchParamType);
|
||||
|
||||
if (havePathWithResolveExpression) {
|
||||
if (havePathWithResolveExpression && myContext.getParserOptions().isAutoContainReferenceTargetsWithNoId()) {
|
||||
//TODO GGG/JA: At this point, if the Task.basedOn.reference.resource does _not_ have an ID, we will attempt to contain it internally. Wild
|
||||
myContext.newTerser().containResources(theResource, FhirTerser.OptionsEnum.MODIFY_RESOURCE, FhirTerser.OptionsEnum.STORE_AND_REUSE_RESULTS);
|
||||
}
|
||||
|
|
|
@ -24,6 +24,8 @@ import ca.uhn.fhir.context.FhirContext;
|
|||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import com.github.benmanes.caffeine.cache.Cache;
|
||||
import com.github.benmanes.caffeine.cache.Caffeine;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
import org.hl7.fhir.exceptions.PathEngineException;
|
||||
|
@ -32,6 +34,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
|
|||
import org.hl7.fhir.r4.context.IWorkerContext;
|
||||
import org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext;
|
||||
import org.hl7.fhir.r4.model.Base;
|
||||
import org.hl7.fhir.r4.model.ExpressionNode;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.Resource;
|
||||
import org.hl7.fhir.r4.model.ResourceType;
|
||||
|
@ -45,11 +48,13 @@ import java.util.Collections;
|
|||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements ISearchParamExtractor {
|
||||
|
||||
private Cache<String, ExpressionNode> myParsedFhirPathCache;
|
||||
private FHIRPathEngine myFhirPathEngine;
|
||||
|
||||
/**
|
||||
|
@ -70,8 +75,8 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
|
|||
@Override
|
||||
public IValueExtractor getPathValueExtractor(IBaseResource theResource, String theSinglePath) {
|
||||
return () -> {
|
||||
List<Base> allValues = myFhirPathEngine.evaluate((Base) theResource, theSinglePath);
|
||||
return (List<IBase>) new ArrayList<IBase>(allValues);
|
||||
ExpressionNode parsed = myParsedFhirPathCache.get(theSinglePath, path -> myFhirPathEngine.parse(path));
|
||||
return myFhirPathEngine.evaluate((Base) theResource, parsed);
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -89,6 +94,11 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
|
|||
IWorkerContext worker = new HapiWorkerContext(getContext(), getContext().getValidationSupport());
|
||||
myFhirPathEngine = new FHIRPathEngine(worker);
|
||||
myFhirPathEngine.setHostServices(new SearchParamExtractorR4HostServices());
|
||||
|
||||
myParsedFhirPathCache = Caffeine
|
||||
.newBuilder()
|
||||
.expireAfterWrite(10, TimeUnit.MINUTES)
|
||||
.build();
|
||||
}
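The parse-once, evaluate-many pattern introduced above can be sketched in isolation. The following is illustrative only, using a plain Function in place of FHIRPathEngine#parse; a SearchParamExtractor-style caller would construct it with myFhirPathEngine::parse.

// Illustrative sketch only (not part of this diff): caches parsed expressions so that repeated
// evaluation of the same path skips the parse step, mirroring myParsedFhirPathCache above.
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

import java.util.concurrent.TimeUnit;
import java.util.function.Function;

public class ParsedExpressionCache<T> {

	private final Cache<String, T> myCache = Caffeine.newBuilder()
		.expireAfterWrite(10, TimeUnit.MINUTES)
		.build();

	private final Function<String, T> myParser;

	public ParsedExpressionCache(Function<String, T> theParser) {
		myParser = theParser;
	}

	public T getOrParse(String thePath) {
		// Caffeine's get() computes and stores the value on a cache miss
		return myCache.get(thePath, myParser);
	}
}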
|
||||
|
||||
|
||||
|
|
|
@ -24,12 +24,16 @@ import ca.uhn.fhir.context.FhirContext;
|
|||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import com.github.benmanes.caffeine.cache.Cache;
|
||||
import com.github.benmanes.caffeine.cache.Caffeine;
|
||||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
import org.hl7.fhir.exceptions.PathEngineException;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r5.context.IWorkerContext;
|
||||
import org.hl7.fhir.r5.hapi.ctx.HapiWorkerContext;
|
||||
import org.hl7.fhir.r5.model.Base;
|
||||
import org.hl7.fhir.r5.model.ExpressionNode;
|
||||
import org.hl7.fhir.r5.model.IdType;
|
||||
import org.hl7.fhir.r5.model.Resource;
|
||||
import org.hl7.fhir.r5.model.ResourceType;
|
||||
|
@ -38,16 +42,19 @@ import org.hl7.fhir.r5.model.ValueSet;
|
|||
import org.hl7.fhir.r5.utils.FHIRPathEngine;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
public class SearchParamExtractorR5 extends BaseSearchParamExtractor implements ISearchParamExtractor {
|
||||
|
||||
private FHIRPathEngine myFhirPathEngine;
|
||||
private Cache<String, ExpressionNode> myParsedFhirPathCache;
|
||||
|
||||
public SearchParamExtractorR5() {
|
||||
super();
|
||||
|
@ -75,11 +82,19 @@ public class SearchParamExtractorR5 extends BaseSearchParamExtractor implements
|
|||
IWorkerContext worker = new HapiWorkerContext(getContext(), getContext().getValidationSupport());
|
||||
myFhirPathEngine = new FHIRPathEngine(worker);
|
||||
myFhirPathEngine.setHostServices(new SearchParamExtractorR5HostServices());
|
||||
|
||||
myParsedFhirPathCache = Caffeine
|
||||
.newBuilder()
|
||||
.expireAfterWrite(10, TimeUnit.MINUTES)
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public IValueExtractor getPathValueExtractor(IBaseResource theResource, String nextPath) {
|
||||
return () -> myFhirPathEngine.evaluate((Base) theResource, nextPath);
|
||||
public IValueExtractor getPathValueExtractor(IBaseResource theResource, String theSinglePath) {
|
||||
return () -> {
|
||||
ExpressionNode parsed = myParsedFhirPathCache.get(theSinglePath, path -> myFhirPathEngine.parse(path));
|
||||
return myFhirPathEngine.evaluate((Base) theResource, parsed);
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -26,11 +26,12 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
|||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.util.BundleUtil;
|
||||
import ca.uhn.fhir.util.ClasspathUtil;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.commons.lang3.tuple.Pair;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBundle;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
|
@ -80,7 +81,11 @@ public class ReadOnlySearchParamCache {
|
|||
return myUrlToParam.get(theUrl);
|
||||
}
|
||||
|
||||
public static ReadOnlySearchParamCache fromFhirContext(FhirContext theFhirContext, SearchParameterCanonicalizer theCanonicalizer) {
|
||||
public static ReadOnlySearchParamCache fromFhirContext(@Nonnull FhirContext theFhirContext, @Nonnull SearchParameterCanonicalizer theCanonicalizer) {
|
||||
return fromFhirContext(theFhirContext, theCanonicalizer, null);
|
||||
}
|
||||
|
||||
public static ReadOnlySearchParamCache fromFhirContext(@Nonnull FhirContext theFhirContext, @Nonnull SearchParameterCanonicalizer theCanonicalizer, @Nullable Set<String> theSearchParamPatternsToInclude) {
|
||||
assert theCanonicalizer != null;
|
||||
|
||||
ReadOnlySearchParamCache retVal = new ReadOnlySearchParamCache();
|
||||
|
@ -97,10 +102,12 @@ public class ReadOnlySearchParamCache {
|
|||
base = resourceNames;
|
||||
}
|
||||
|
||||
for (String nextBase : base) {
|
||||
Map<String, RuntimeSearchParam> nameToParam = retVal.myResourceNameToSpNameToSp.computeIfAbsent(nextBase, t -> new HashMap<>());
|
||||
String nextName = nextCanonical.getName();
|
||||
nameToParam.putIfAbsent(nextName, nextCanonical);
|
||||
for (String nextResourceName : base) {
|
||||
Map<String, RuntimeSearchParam> nameToParam = retVal.myResourceNameToSpNameToSp.computeIfAbsent(nextResourceName, t -> new HashMap<>());
|
||||
String nextParamName = nextCanonical.getName();
|
||||
if (theSearchParamPatternsToInclude == null || searchParamMatchesAtLeastOnePattern(theSearchParamPatternsToInclude, nextResourceName, nextParamName)) {
|
||||
nameToParam.putIfAbsent(nextParamName, nextCanonical);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -112,13 +119,40 @@ public class ReadOnlySearchParamCache {
|
|||
|
||||
Map<String, RuntimeSearchParam> nameToParam = retVal.myResourceNameToSpNameToSp.computeIfAbsent(nextResourceName, t -> new HashMap<>());
|
||||
for (RuntimeSearchParam nextSp : nextResDef.getSearchParams()) {
|
||||
nameToParam.putIfAbsent(nextSp.getName(), nextSp);
|
||||
String nextParamName = nextSp.getName();
|
||||
if (theSearchParamPatternsToInclude == null || searchParamMatchesAtLeastOnePattern(theSearchParamPatternsToInclude, nextResourceName, nextParamName)) {
|
||||
nameToParam.putIfAbsent(nextParamName, nextSp);
|
||||
}
|
||||
}
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
public static boolean searchParamMatchesAtLeastOnePattern(Set<String> theSearchParamPatterns, String theResourceType, String theSearchParamName) {
|
||||
for (String nextPattern : theSearchParamPatterns) {
|
||||
if ("*".equals(nextPattern)) {
|
||||
return true;
|
||||
}
|
||||
int colonIdx = nextPattern.indexOf(':');
|
||||
Validate.isTrue(colonIdx > 0, "Invalid search param pattern: %s", nextPattern);
|
||||
String resourceType = nextPattern.substring(0, colonIdx);
|
||||
String searchParamName = nextPattern.substring(colonIdx + 1);
|
||||
Validate.notBlank(resourceType, "No resource type specified in pattern: %s", nextPattern);
|
||||
Validate.notBlank(searchParamName, "No param name specified in pattern: %s", nextPattern);
|
||||
if (!resourceType.equals("*") && !resourceType.equals(theResourceType)) {
|
||||
continue;
|
||||
}
|
||||
if (!searchParamName.equals("*") && !searchParamName.equals(theSearchParamName)) {
|
||||
continue;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
public static ReadOnlySearchParamCache fromRuntimeSearchParamCache(RuntimeSearchParamCache theRuntimeSearchParamCache) {
|
||||
return new ReadOnlySearchParamCache(theRuntimeSearchParamCache);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -37,6 +37,7 @@ import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
|||
import ca.uhn.fhir.util.SearchParameterUtil;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
|
@ -59,7 +60,12 @@ import java.util.Set;
|
|||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
|
||||
public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceChangeListener, ISearchParamRegistryController {
|
||||
// TODO: JA remove unused?
|
||||
|
||||
public static final Set<String> NON_DISABLEABLE_SEARCH_PARAMS = Collections.unmodifiableSet(Sets.newHashSet(
|
||||
"*:url",
|
||||
"Subscription:*",
|
||||
"SearchParameter:*"
|
||||
));
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(SearchParamRegistryImpl.class);
|
||||
private static final int MAX_MANAGED_PARAM_COUNT = 10000;
|
||||
|
@ -78,7 +84,6 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC
|
|||
private volatile ReadOnlySearchParamCache myBuiltInSearchParams;
|
||||
private volatile IPhoneticEncoder myPhoneticEncoder;
|
||||
private volatile RuntimeSearchParamCache myActiveSearchParams;
|
||||
|
||||
@Autowired
|
||||
private IInterceptorService myInterceptorBroadcaster;
|
||||
private IResourceChangeListenerCache myResourceChangeListenerCache;
|
||||
|
@ -134,6 +139,8 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC
|
|||
params.setLoadSynchronousUpTo(MAX_MANAGED_PARAM_COUNT);
|
||||
|
||||
IBundleProvider allSearchParamsBp = mySearchParamProvider.search(params);
|
||||
|
||||
List<IBaseResource> allSearchParams = allSearchParamsBp.getResources(0, MAX_MANAGED_PARAM_COUNT);
|
||||
int size = allSearchParamsBp.sizeOrThrowNpe();
|
||||
|
||||
ourLog.trace("Loaded {} search params from the DB", size);
|
||||
|
@ -141,9 +148,8 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC
|
|||
// Just in case..
|
||||
if (size >= MAX_MANAGED_PARAM_COUNT) {
|
||||
ourLog.warn("Unable to support >" + MAX_MANAGED_PARAM_COUNT + " search params!");
|
||||
size = MAX_MANAGED_PARAM_COUNT;
|
||||
}
|
||||
List<IBaseResource> allSearchParams = allSearchParamsBp.getResources(0, size);
|
||||
|
||||
initializeActiveSearchParams(allSearchParams);
|
||||
}
|
||||
|
||||
|
@ -167,7 +173,12 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC
|
|||
|
||||
private ReadOnlySearchParamCache getBuiltInSearchParams() {
|
||||
if (myBuiltInSearchParams == null) {
|
||||
if (myModelConfig.isAutoSupportDefaultSearchParams()) {
|
||||
myBuiltInSearchParams = ReadOnlySearchParamCache.fromFhirContext(myFhirContext, mySearchParameterCanonicalizer);
|
||||
} else {
|
||||
// Only the built-in search params that cannot be disabled will be supported automatically
|
||||
myBuiltInSearchParams = ReadOnlySearchParamCache.fromFhirContext(myFhirContext, mySearchParameterCanonicalizer, NON_DISABLEABLE_SEARCH_PARAMS);
|
||||
}
|
||||
}
|
||||
return myBuiltInSearchParams;
|
||||
}
|
||||
|
@ -311,6 +322,7 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC
|
|||
|
||||
@VisibleForTesting
|
||||
public void resetForUnitTest() {
|
||||
myBuiltInSearchParams = null;
|
||||
handleInit(Collections.emptyList());
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,32 @@
|
|||
package ca.uhn.fhir.jpa.searchparam.registry;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static ca.uhn.fhir.jpa.searchparam.registry.ReadOnlySearchParamCache.searchParamMatchesAtLeastOnePattern;
|
||||
import static com.google.common.collect.Sets.newHashSet;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class ReadOnlySearchParamCacheTest {
|
||||
|
||||
@Test
|
||||
void testSearchParamMatchesAtLeastOnePattern() {
|
||||
assertTrue(searchParamMatchesAtLeastOnePattern(newHashSet("*"), "Patient", "name"));
|
||||
assertTrue(searchParamMatchesAtLeastOnePattern(newHashSet("Patient:name"), "Patient", "name"));
|
||||
assertTrue(searchParamMatchesAtLeastOnePattern(newHashSet("Patient:*"), "Patient", "name"));
|
||||
assertTrue(searchParamMatchesAtLeastOnePattern(newHashSet("*:name"), "Patient", "name"));
|
||||
assertFalse(searchParamMatchesAtLeastOnePattern(newHashSet("Patient:foo"), "Patient", "name"));
|
||||
assertFalse(searchParamMatchesAtLeastOnePattern(newHashSet("Foo:name"), "Patient", "name"));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void testSearchParamMatchesAtLeastOnePattern_InvalidPattern() {
|
||||
assertThrows(IllegalArgumentException.class, () -> searchParamMatchesAtLeastOnePattern(newHashSet("aaa"), "Patient", "name"));
|
||||
assertThrows(IllegalArgumentException.class, () -> searchParamMatchesAtLeastOnePattern(newHashSet(":name"), "Patient", "name"));
|
||||
assertThrows(IllegalArgumentException.class, () -> searchParamMatchesAtLeastOnePattern(newHashSet("Patient:"), "Patient", "name"));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.subscription.api;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR Subscription Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.subscription.channel.models;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR Subscription Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.subscription.model.ChannelRetryConfiguration;
|
||||
|
||||
public class BaseChannelParameters {
|
||||
|
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.subscription.channel.models;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR Subscription Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
public class ProducingChannelParameters extends BaseChannelParameters {
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.subscription.channel.models;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR Subscription Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
public class ReceivingChannelParameters extends BaseChannelParameters {
|
||||
|
||||
/**
|
||||
|
|
|
@ -108,6 +108,7 @@ import org.springframework.transaction.TransactionDefinition;
|
|||
import org.springframework.transaction.support.TransactionCallback;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.PostConstruct;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
|
@ -137,8 +138,10 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
public abstract class BaseTransactionProcessor {
|
||||
|
||||
public static final String URN_PREFIX = "urn:";
|
||||
public static final String URN_PREFIX_ESCAPED = UrlUtil.escapeUrlParam(URN_PREFIX);
|
||||
public static final Pattern UNQUALIFIED_MATCH_URL_START = Pattern.compile("^[a-zA-Z0-9_]+=");
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(BaseTransactionProcessor.class);
|
||||
public static final Pattern INVALID_PLACEHOLDER_PATTERN = Pattern.compile("[a-zA-Z]+:.*");
|
||||
private BaseStorageDao myDao;
|
||||
@Autowired
|
||||
private PlatformTransactionManager myTxManager;
|
||||
|
@ -168,17 +171,6 @@ public abstract class BaseTransactionProcessor {
|
|||
@Autowired
|
||||
private IResourceVersionSvc myResourceVersionSvc;
|
||||
|
||||
public static boolean isPlaceholder(IIdType theId) {
|
||||
if (theId != null && theId.getValue() != null) {
|
||||
return theId.getValue().startsWith("urn:oid:") || theId.getValue().startsWith("urn:uuid:");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private static String toStatusString(int theStatusCode) {
|
||||
return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode));
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public void setDaoConfig(DaoConfig theDaoConfig) {
|
||||
myDaoConfig = theDaoConfig;
|
||||
|
@ -273,14 +265,16 @@ public abstract class BaseTransactionProcessor {
|
|||
myVersionAdapter.populateEntryWithOperationOutcome(caughtEx, nextEntry);
|
||||
}
|
||||
|
||||
private void handleTransactionCreateOrUpdateOutcome(Map<IIdType, IIdType> idSubstitutions, Map<IIdType, DaoMethodOutcome> idToPersistedOutcome,
|
||||
private void handleTransactionCreateOrUpdateOutcome(IdSubstitutionMap idSubstitutions, Map<IIdType, DaoMethodOutcome> idToPersistedOutcome,
|
||||
IIdType nextResourceId, DaoMethodOutcome outcome,
|
||||
IBase newEntry, String theResourceType,
|
||||
IBaseResource theRes, RequestDetails theRequestDetails) {
|
||||
IIdType newId = outcome.getId().toUnqualified();
|
||||
IIdType resourceId = isPlaceholder(nextResourceId) ? nextResourceId : nextResourceId.toUnqualifiedVersionless();
|
||||
if (newId.equals(resourceId) == false) {
|
||||
if (!nextResourceId.isEmpty()) {
|
||||
idSubstitutions.put(resourceId, newId);
|
||||
}
|
||||
if (isPlaceholder(resourceId)) {
|
||||
/*
|
||||
* Substitution IDs should have no resource type, but we'll accept the qualified kind too just to be lenient.
|
||||
|
@ -336,30 +330,6 @@ public abstract class BaseTransactionProcessor {
|
|||
return theRes.getMeta().getLastUpdated();
|
||||
}
|
||||
|
||||
private String performIdSubstitutionsInMatchUrl(Map<IIdType, IIdType> theIdSubstitutions, String theMatchUrl) {
|
||||
String matchUrl = theMatchUrl;
|
||||
if (isNotBlank(matchUrl)) {
|
||||
for (Map.Entry<IIdType, IIdType> nextSubstitutionEntry : theIdSubstitutions.entrySet()) {
|
||||
IIdType nextTemporaryId = nextSubstitutionEntry.getKey();
|
||||
IIdType nextReplacementId = nextSubstitutionEntry.getValue();
|
||||
String nextTemporaryIdPart = nextTemporaryId.getIdPart();
|
||||
String nextReplacementIdPart = nextReplacementId.getValueAsString();
|
||||
if (isUrn(nextTemporaryId) && nextTemporaryIdPart.length() > URN_PREFIX.length()) {
|
||||
matchUrl = matchUrl.replace(nextTemporaryIdPart, nextReplacementIdPart);
|
||||
String escapedUrlParam = UrlUtil.escapeUrlParam(nextTemporaryIdPart);
|
||||
if (isNotBlank(escapedUrlParam)) {
|
||||
matchUrl = matchUrl.replace(escapedUrlParam, nextReplacementIdPart);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return matchUrl;
|
||||
}
|
||||
|
||||
private boolean isUrn(IIdType theId) {
|
||||
return defaultString(theId.getValue()).startsWith(URN_PREFIX);
|
||||
}
|
||||
|
||||
public void setDao(BaseStorageDao theDao) {
|
||||
myDao = theDao;
|
||||
}
|
||||
|
@ -637,12 +607,12 @@ public abstract class BaseTransactionProcessor {
|
|||
callWriteOperationsHook(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, theRequestDetails, theTransactionDetails, writeOperationsDetails);
|
||||
}
|
||||
|
||||
TransactionCallback<Map<IBase, IIdType>> txCallback = status -> {
|
||||
TransactionCallback<EntriesToProcessMap> txCallback = status -> {
|
||||
final Set<IIdType> allIds = new LinkedHashSet<>();
|
||||
final Map<IIdType, IIdType> idSubstitutions = new HashMap<>();
|
||||
final IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
|
||||
final Map<IIdType, DaoMethodOutcome> idToPersistedOutcome = new HashMap<>();
|
||||
|
||||
Map<IBase, IIdType> retVal = doTransactionWriteOperations(theRequestDetails, theActionName,
|
||||
EntriesToProcessMap retVal = doTransactionWriteOperations(theRequestDetails, theActionName,
|
||||
theTransactionDetails, allIds,
|
||||
idSubstitutions, idToPersistedOutcome,
|
||||
theResponse, theOriginalRequestOrder,
|
||||
|
@ -651,7 +621,7 @@ public abstract class BaseTransactionProcessor {
|
|||
theTransactionStopWatch.startTask("Commit writes to database");
|
||||
return retVal;
|
||||
};
|
||||
Map<IBase, IIdType> entriesToProcess;
|
||||
EntriesToProcessMap entriesToProcess;
|
||||
|
||||
try {
|
||||
entriesToProcess = myHapiTransactionService.execute(theRequestDetails, theTransactionDetails, txCallback);
|
||||
|
@ -844,9 +814,14 @@ public abstract class BaseTransactionProcessor {
|
|||
}
|
||||
}
|
||||
|
||||
if (nextResourceId.hasIdPart() && nextResourceId.getIdPart().matches("[a-zA-Z]+:.*") && !isPlaceholder(nextResourceId)) {
|
||||
if (nextResourceId.hasIdPart() && !isPlaceholder(nextResourceId)) {
|
||||
int colonIndex = nextResourceId.getIdPart().indexOf(':');
|
||||
if (colonIndex != -1) {
|
||||
if (INVALID_PLACEHOLDER_PATTERN.matcher(nextResourceId.getIdPart()).matches()) {
|
||||
throw new InvalidRequestException("Invalid placeholder ID found: " + nextResourceId.getIdPart() + " - Must be of the form 'urn:uuid:[uuid]' or 'urn:oid:[oid]'");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (nextResourceId.hasIdPart() && !nextResourceId.hasResourceType() && !isPlaceholder(nextResourceId)) {
|
||||
nextResourceId = newIdType(toResourceName(theBaseResource.getClass()), nextResourceId.getIdPart());
|
||||
|
@ -875,9 +850,9 @@ public abstract class BaseTransactionProcessor {
|
|||
/**
|
||||
* After pre-hooks have been called
|
||||
*/
|
||||
protected Map<IBase, IIdType> doTransactionWriteOperations(final RequestDetails theRequest, String theActionName,
|
||||
protected EntriesToProcessMap doTransactionWriteOperations(final RequestDetails theRequest, String theActionName,
|
||||
TransactionDetails theTransactionDetails, Set<IIdType> theAllIds,
|
||||
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome,
|
||||
IdSubstitutionMap theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome,
|
||||
IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder,
|
||||
List<IBase> theEntries, StopWatch theTransactionStopWatch) {
|
||||
|
||||
|
@ -890,7 +865,7 @@ public abstract class BaseTransactionProcessor {
|
|||
try {
|
||||
Set<String> deletedResources = new HashSet<>();
|
||||
DeleteConflictList deleteConflicts = new DeleteConflictList();
|
||||
Map<IBase, IIdType> entriesToProcess = new IdentityHashMap<>();
|
||||
EntriesToProcessMap entriesToProcess = new EntriesToProcessMap();
|
||||
Set<IIdType> nonUpdatedEntities = new HashSet<>();
|
||||
Set<IBasePersistedResource> updatedEntities = new HashSet<>();
|
||||
Map<String, IIdType> conditionalUrlToIdMap = new HashMap<>();
|
||||
|
@ -1139,7 +1114,7 @@ public abstract class BaseTransactionProcessor {
|
|||
theTransactionStopWatch.endCurrentTask();
|
||||
|
||||
for (IIdType next : theAllIds) {
|
||||
IIdType replacement = theIdSubstitutions.get(next);
|
||||
IIdType replacement = theIdSubstitutions.getForSource(next);
|
||||
if (replacement != null && !replacement.equals(next)) {
|
||||
ourLog.debug("Placeholder resource ID \"{}\" was replaced with permanent ID \"{}\"", next, replacement);
|
||||
}
|
||||
|
@ -1183,9 +1158,6 @@ public abstract class BaseTransactionProcessor {
|
|||
/**
|
||||
* After transaction processing and resolution of indexes and references, we want to validate that the resources that were stored _actually_
|
||||
* match the conditional URLs that they were brought in on.
|
||||
*
|
||||
* @param theIdToPersistedOutcome
|
||||
* @param conditionalUrlToIdMap
|
||||
*/
|
||||
private void validateAllInsertsMatchTheirConditionalUrls(Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, Map<String, IIdType> conditionalUrlToIdMap, RequestDetails theRequest) {
|
||||
conditionalUrlToIdMap.entrySet().stream()
|
||||
|
@ -1280,8 +1252,8 @@ public abstract class BaseTransactionProcessor {
|
|||
* account for NOPs, so we block NOPs in that pass.
|
||||
*/
|
||||
private void resolveReferencesThenSaveAndIndexResources(RequestDetails theRequest, TransactionDetails theTransactionDetails,
|
||||
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome,
|
||||
StopWatch theTransactionStopWatch, Map<IBase, IIdType> entriesToProcess,
|
||||
IdSubstitutionMap theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome,
|
||||
StopWatch theTransactionStopWatch, EntriesToProcessMap entriesToProcess,
|
||||
Set<IIdType> nonUpdatedEntities, Set<IBasePersistedResource> updatedEntities) {
|
||||
FhirTerser terser = myContext.newTerser();
|
||||
theTransactionStopWatch.startTask("Index " + theIdToPersistedOutcome.size() + " resources");
|
||||
|
@ -1339,8 +1311,8 @@ public abstract class BaseTransactionProcessor {
|
|||
}
|
||||
|
||||
private void resolveReferencesThenSaveAndIndexResource(RequestDetails theRequest, TransactionDetails theTransactionDetails,
|
||||
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome,
|
||||
Map<IBase, IIdType> entriesToProcess, Set<IIdType> nonUpdatedEntities,
|
||||
IdSubstitutionMap theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome,
|
||||
EntriesToProcessMap entriesToProcess, Set<IIdType> nonUpdatedEntities,
|
||||
Set<IBasePersistedResource> updatedEntities, FhirTerser terser,
|
||||
DaoMethodOutcome nextOutcome, IBaseResource nextResource,
|
||||
Set<IBaseReference> theReferencesToAutoVersion) {
|
||||
|
@ -1356,7 +1328,7 @@ public abstract class BaseTransactionProcessor {
|
|||
if (targetId.getValue() == null || targetId.getValue().startsWith("#")) {
|
||||
// This means it's a contained resource
|
||||
continue;
|
||||
} else if (theIdSubstitutions.containsValue(targetId)) {
|
||||
} else if (theIdSubstitutions.containsTarget(targetId)) {
|
||||
newId = targetId;
|
||||
} else {
|
||||
throw new InternalErrorException("References by resource with no reference ID are not supported in DAO layer");
|
||||
|
@ -1365,9 +1337,9 @@ public abstract class BaseTransactionProcessor {
|
|||
continue;
|
||||
}
|
||||
}
|
||||
if (newId != null || theIdSubstitutions.containsKey(nextId)) {
|
||||
if (newId != null || theIdSubstitutions.containsSource(nextId)) {
|
||||
if (newId == null) {
|
||||
newId = theIdSubstitutions.get(nextId);
|
||||
newId = theIdSubstitutions.getForSource(nextId);
|
||||
}
|
||||
if (newId != null) {
|
||||
ourLog.debug(" * Replacing resource ref {} with {}", nextId, newId);
|
||||
|
@ -1427,9 +1399,9 @@ public abstract class BaseTransactionProcessor {
|
|||
if (nextRef instanceof IIdType) {
|
||||
continue; // No substitution on the resource ID itself!
|
||||
}
|
||||
IIdType nextUriString = newIdType(nextRef.getValueAsString());
|
||||
if (theIdSubstitutions.containsKey(nextUriString)) {
|
||||
IIdType newId = theIdSubstitutions.get(nextUriString);
|
||||
String nextUriString = nextRef.getValueAsString();
|
||||
if (theIdSubstitutions.containsSource(nextUriString)) {
|
||||
IIdType newId = theIdSubstitutions.getForSource(nextUriString);
|
||||
ourLog.debug(" * Replacing resource ref {} with {}", nextUriString, newId);
|
||||
|
||||
String existingValue = nextRef.getValueAsString();
|
||||
|
@ -1464,26 +1436,17 @@ public abstract class BaseTransactionProcessor {
|
|||
// Make sure we reflect the actual final version for the resource.
|
||||
if (updateOutcome != null) {
|
||||
IIdType newId = updateOutcome.getIdDt();
|
||||
for (IIdType nextEntry : entriesToProcess.values()) {
|
||||
if (nextEntry.getResourceType().equals(newId.getResourceType())) {
|
||||
if (nextEntry.getIdPart().equals(newId.getIdPart())) {
|
||||
if (!nextEntry.hasVersionIdPart() || !nextEntry.getVersionIdPart().equals(newId.getVersionIdPart())) {
|
||||
nextEntry.setParts(nextEntry.getBaseUrl(), nextEntry.getResourceType(), nextEntry.getIdPart(), newId.getVersionIdPart());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
IIdType entryId = entriesToProcess.getIdWithVersionlessComparison(newId);
|
||||
if (entryId != null && !StringUtils.equals(entryId.getValue(), newId.getValue())) {
|
||||
entryId.setValue(newId.getValue());
|
||||
}
|
||||
|
||||
nextOutcome.setId(newId);
|
||||
|
||||
for (IIdType next : theIdSubstitutions.values()) {
|
||||
if (next.getResourceType().equals(newId.getResourceType())) {
|
||||
if (next.getIdPart().equals(newId.getIdPart())) {
|
||||
if (!next.getValue().equals(newId.getValue())) {
|
||||
next.setValue(newId.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
IIdType target = theIdSubstitutions.getForSource(newId);
|
||||
if (target != null) {
|
||||
target.setValue(newId.getValue());
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1632,18 +1595,6 @@ public abstract class BaseTransactionProcessor {
|
|||
return null;
|
||||
}
|
||||
|
||||
private static class BaseServerResponseExceptionHolder {
|
||||
private BaseServerResponseException myException;
|
||||
|
||||
public BaseServerResponseException getException() {
|
||||
return myException;
|
||||
}
|
||||
|
||||
public void setException(BaseServerResponseException myException) {
|
||||
this.myException = myException;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Transaction Order, per the spec:
|
||||
* <p>
|
||||
|
@ -1814,4 +1765,93 @@ public abstract class BaseTransactionProcessor {
|
|||
}
|
||||
|
||||
}
|
||||
|
||||
private static class BaseServerResponseExceptionHolder {
|
||||
private BaseServerResponseException myException;
|
||||
|
||||
public BaseServerResponseException getException() {
|
||||
return myException;
|
||||
}
|
||||
|
||||
public void setException(BaseServerResponseException myException) {
|
||||
this.myException = myException;
|
||||
}
|
||||
}
|
||||
|
||||
public static boolean isPlaceholder(IIdType theId) {
|
||||
if (theId != null && theId.getValue() != null) {
|
||||
return theId.getValue().startsWith("urn:oid:") || theId.getValue().startsWith("urn:uuid:");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private static String toStatusString(int theStatusCode) {
|
||||
return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode));
|
||||
}
|
||||
|
||||
/**
 * Given a match URL whose parameter values may contain placeholder IDs (<code>urn:uuid:...</code> or
 * <code>urn:oid:...</code>), returns the same URL with each placeholder replaced by the permanent ID
 * it was substituted with during transaction processing.
 *
 * @param theIdSubstitutions The placeholder-to-permanent ID substitutions recorded so far
 * @param theMatchUrl        The match URL to process
 * @return The match URL with any resolvable placeholder parameter values replaced
 */
|
||||
static String performIdSubstitutionsInMatchUrl(IdSubstitutionMap theIdSubstitutions, String theMatchUrl) {
|
||||
String matchUrl = theMatchUrl;
|
||||
if (isNotBlank(matchUrl) && !theIdSubstitutions.isEmpty()) {
|
||||
|
||||
int startIdx = matchUrl.indexOf('?');
|
||||
while (startIdx != -1) {
|
||||
|
||||
int endIdx = matchUrl.indexOf('&', startIdx + 1);
|
||||
if (endIdx == -1) {
|
||||
endIdx = matchUrl.length();
|
||||
}
|
||||
|
||||
int equalsIdx = matchUrl.indexOf('=', startIdx + 1);
|
||||
|
||||
int searchFrom;
|
||||
if (equalsIdx == -1) {
|
||||
searchFrom = matchUrl.length();
|
||||
} else if (equalsIdx >= endIdx) {
|
||||
// First equals we found is from a subsequent parameter
|
||||
searchFrom = matchUrl.length();
|
||||
} else {
|
||||
String paramValue = matchUrl.substring(equalsIdx + 1, endIdx);
|
||||
boolean isUrn = isUrn(paramValue);
|
||||
boolean isUrnEscaped = !isUrn && isUrnEscaped(paramValue);
|
||||
if (isUrn || isUrnEscaped) {
|
||||
if (isUrnEscaped) {
|
||||
paramValue = UrlUtil.unescape(paramValue);
|
||||
}
|
||||
IIdType replacement = theIdSubstitutions.getForSource(paramValue);
|
||||
if (replacement != null) {
|
||||
matchUrl = matchUrl.substring(0, equalsIdx + 1) + replacement.getValue() + matchUrl.substring(endIdx);
|
||||
searchFrom = equalsIdx + 1 + replacement.getValue().length();
|
||||
} else {
|
||||
searchFrom = endIdx;
|
||||
}
|
||||
} else {
|
||||
searchFrom = endIdx;
|
||||
}
|
||||
}
|
||||
|
||||
if (searchFrom >= matchUrl.length()) {
|
||||
break;
|
||||
}
|
||||
|
||||
startIdx = matchUrl.indexOf('&', searchFrom);
|
||||
}
|
||||
|
||||
}
|
||||
return matchUrl;
|
||||
}
|
||||
|
||||
private static boolean isUrn(@Nonnull String theId) {
|
||||
return theId.startsWith(URN_PREFIX);
|
||||
}
|
||||
|
||||
private static boolean isUrnEscaped(@Nonnull String theId) {
|
||||
return theId.startsWith(URN_PREFIX_ESCAPED);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,50 @@
|
|||
package ca.uhn.fhir.jpa.dao;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR Storage api
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static ca.uhn.fhir.jpa.dao.IdSubstitutionMap.toVersionlessValue;
|
||||
|
||||
public class EntriesToProcessMap {
|
||||
|
||||
private final IdentityHashMap<IBase, IIdType> myEntriesToProcess = new IdentityHashMap<>();
|
||||
private final Map<String, IIdType> myVersionlessIdToVersionedId = new HashMap<>();
|
||||
|
||||
public void put(IBase theBundleEntry, IIdType theId) {
|
||||
myEntriesToProcess.put(theBundleEntry, theId);
|
||||
myVersionlessIdToVersionedId.put(toVersionlessValue(theId), theId);
|
||||
}
|
||||
|
||||
public IIdType getIdWithVersionlessComparison(IIdType theId) {
|
||||
return myVersionlessIdToVersionedId.get(toVersionlessValue(theId));
|
||||
}
|
||||
|
||||
public Set<Map.Entry<IBase, IIdType>> entrySet() {
|
||||
return myEntriesToProcess.entrySet();
|
||||
}
|
||||
}
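A small usage sketch of the lookup this map enables (illustrative only; the R4 model types and values are placeholders):

// Illustrative sketch only (not part of this diff).
import ca.uhn.fhir.jpa.dao.EntriesToProcessMap;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IdType;

public class EntriesToProcessMapExample {

	public static void main(String[] args) {
		EntriesToProcessMap map = new EntriesToProcessMap();

		Bundle.BundleEntryComponent entry = new Bundle.BundleEntryComponent();
		map.put(entry, new IdType("Patient/123/_history/1"));

		// The versioned ID can be found again using only its versionless form,
		// which is what BaseTransactionProcessor needs when reconciling outcomes.
		IIdType found = map.getIdWithVersionlessComparison(new IdType("Patient/123"));
		System.out.println(found.getValue()); // Patient/123/_history/1
	}
}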
|
|
@ -0,0 +1,141 @@
|
|||
package ca.uhn.fhir.jpa.dao;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR Storage api
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import com.google.common.collect.Multimap;
|
||||
import com.google.common.collect.MultimapBuilder;
|
||||
import org.apache.commons.lang3.tuple.Pair;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class IdSubstitutionMap {
|
||||
|
||||
private final Map<Entry, Entry> myMap = new HashMap<>();
|
||||
private final Multimap<Entry, Entry> myReverseMap = MultimapBuilder.hashKeys().arrayListValues().build();
|
||||
|
||||
|
||||
public boolean containsSource(IIdType theId) {
|
||||
if (theId.isLocal()) {
|
||||
return false;
|
||||
}
|
||||
return myMap.containsKey(new Entry(theId));
|
||||
}
|
||||
|
||||
public boolean containsSource(String theId) {
|
||||
return myMap.containsKey(new Entry(theId));
|
||||
}
|
||||
|
||||
public boolean containsTarget(IIdType theId) {
|
||||
return myReverseMap.containsKey(new Entry(theId));
|
||||
}
|
||||
|
||||
public boolean containsTarget(String theId) {
|
||||
return myReverseMap.containsKey(new Entry(theId));
|
||||
}
|
||||
|
||||
public IIdType getForSource(IIdType theId) {
|
||||
Entry target = myMap.get(new Entry(theId));
|
||||
if (target != null) {
|
||||
assert target.myId != null;
|
||||
return target.myId;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
public IIdType getForSource(String theId) {
|
||||
Entry target = myMap.get(new Entry(theId));
|
||||
if (target != null) {
|
||||
assert target.myId != null;
|
||||
return target.myId;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public List<Pair<IIdType, IIdType>> entrySet() {
|
||||
return myMap
|
||||
.entrySet()
|
||||
.stream()
|
||||
.map(t->Pair.of(t.getKey().myId, t.getValue().myId))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public void put(IIdType theSource, IIdType theTarget) {
|
||||
myMap.put(new Entry(theSource), new Entry(theTarget));
|
||||
myReverseMap.put(new Entry(theTarget), new Entry(theSource));
|
||||
}
|
||||
|
||||
public boolean isEmpty() {
|
||||
return myMap.isEmpty();
|
||||
}
|
||||
|
||||
|
||||
private static class Entry {
|
||||
|
||||
private final String myUnversionedId;
|
||||
private final IIdType myId;
|
||||
|
||||
private Entry(String theId) {
|
||||
myId = null;
|
||||
myUnversionedId = theId;
|
||||
}
|
||||
|
||||
private Entry(IIdType theId) {
|
||||
String unversionedId = toVersionlessValue(theId);
|
||||
myUnversionedId = unversionedId;
|
||||
myId = theId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object theOther) {
|
||||
if (theOther instanceof Entry) {
|
||||
String otherUnversionedId = ((Entry) theOther).myUnversionedId;
|
||||
if (myUnversionedId.equals(otherUnversionedId)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return myUnversionedId.hashCode();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static String toVersionlessValue(IIdType theId) {
|
||||
boolean isPlaceholder = theId.getValue().startsWith("urn:");
|
||||
String unversionedId;
|
||||
if (isPlaceholder || (!theId.hasBaseUrl() && !theId.hasVersionIdPart()) || !theId.hasResourceType()) {
|
||||
unversionedId = theId.getValue();
|
||||
} else {
|
||||
unversionedId = theId.toUnqualifiedVersionless().getValue();
|
||||
}
|
||||
return unversionedId;
|
||||
}
|
||||
}
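A brief sketch of how the substitution map behaves, mirroring the versionless comparison implemented by Entry#equals above (illustrative values only; not part of this diff):

// Illustrative sketch only (not part of this diff).
import ca.uhn.fhir.jpa.dao.IdSubstitutionMap;
import org.hl7.fhir.r4.model.IdType;

public class IdSubstitutionMapExample {

	public static void main(String[] args) {
		IdSubstitutionMap map = new IdSubstitutionMap();

		// A placeholder ID from a transaction bundle maps to the permanent ID it was assigned
		map.put(new IdType("urn:uuid:1234"), new IdType("Patient/123"));

		System.out.println(map.containsSource("urn:uuid:1234"));                            // true
		System.out.println(map.getForSource(new IdType("urn:uuid:1234")).getValue());       // Patient/123
		System.out.println(map.containsTarget(new IdType("Patient/123")));                  // true

		// Comparison ignores the version part, so a versioned lookup still matches
		System.out.println(map.containsTarget(new IdType("Patient/123/_history/1")));       // true
	}
}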
|
|
@ -185,7 +185,7 @@ public class MatchResourceUrlService {
|
|||
@Nullable
|
||||
public ResourcePersistentId processMatchUrlUsingCacheOnly(String theResourceType, String theMatchUrl) {
|
||||
ResourcePersistentId existing = null;
|
||||
if (myDaoConfig.getMatchUrlCache()) {
|
||||
if (myDaoConfig.isMatchUrlCacheEnabled()) {
|
||||
String matchUrl = massageForStorage(theResourceType, theMatchUrl);
|
||||
existing = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.MATCH_URL, matchUrl);
|
||||
}
|
||||
|
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.subscription.model;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR Storage api
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
public class ChannelRetryConfiguration {
|
||||
/**
|
||||
* Number of times to retry a failed message.
|
||||
|
|
|
@ -0,0 +1,80 @@
|
|||
package ca.uhn.fhir.jpa.dao;
|
||||
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
public class BaseTransactionProcessorTest {
|
||||
|
||||
@Test
|
||||
void testPerformIdSubstitutionsInMatchUrl_MatchAtStart() {
|
||||
IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
|
||||
idSubstitutions.put(new IdType("urn:uuid:1234"), new IdType("Patient/123"));
|
||||
String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(idSubstitutions, "Patient?foo=urn:uuid:1234&bar=baz");
|
||||
assertEquals("Patient?foo=Patient/123&bar=baz", outcome);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPerformIdSubstitutionsInMatchUrl_MatchAtEnd() {
|
||||
IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
|
||||
idSubstitutions.put(new IdType("urn:uuid:7ea4f3a6-d2a3-4105-9f31-374d525085d4"), new IdType("Patient/123"));
|
||||
String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(idSubstitutions, "Patient?name=FAMILY1&organization=urn%3Auuid%3A7ea4f3a6-d2a3-4105-9f31-374d525085d4");
|
||||
assertEquals("Patient?name=FAMILY1&organization=Patient/123", outcome);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPerformIdSubstitutionsInMatchUrl_MatchEscapedParam() {
|
||||
IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
|
||||
idSubstitutions.put(new IdType("urn:uuid:1234"), new IdType("Patient/123"));
|
||||
String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(idSubstitutions, "Patient?foo=" + UrlUtil.escapeUrlParam("urn:uuid:1234") + "&bar=baz");
|
||||
assertEquals("Patient?foo=Patient/123&bar=baz", outcome);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPerformIdSubstitutionsInMatchUrl_MatchInParamNameShouldntBeReplaced() {
|
||||
IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
|
||||
idSubstitutions.put(new IdType("urn:uuid:1234"), new IdType("Patient/123"));
|
||||
String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(idSubstitutions, "Patient?urn:uuid:1234=foo&bar=baz");
|
||||
assertEquals("Patient?urn:uuid:1234=foo&bar=baz", outcome);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPerformIdSubstitutionsInMatchUrl_NoParams() {
|
||||
IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
|
||||
idSubstitutions.put(new IdType("urn:uuid:1234"), new IdType("Patient/123"));
|
||||
String input = "Patient";
|
||||
String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(idSubstitutions, input);
|
||||
assertEquals(input, outcome);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPerformIdSubstitutionsInMatchUrl_UnterminatedParams() {
|
||||
IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
|
||||
idSubstitutions.put(new IdType("urn:uuid:1234"), new IdType("Patient/123"));
|
||||
String input = "Patient?foo&bar=&baz";
|
||||
String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(idSubstitutions, input);
|
||||
assertEquals(input, outcome);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPerformIdSubstitutionsInMatchUrl_ReplaceMultiple() {
|
||||
IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
|
||||
idSubstitutions.put(new IdType("urn:uuid:1234"), new IdType("Patient/abcdefghijklmnopqrstuvwxyz0123456789"));
|
||||
String input = "Patient?foo=urn:uuid:1234&bar=urn:uuid:1234&baz=urn:uuid:1234";
|
||||
String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(idSubstitutions, input);
|
||||
String expected = "Patient?foo=Patient/abcdefghijklmnopqrstuvwxyz0123456789&bar=Patient/abcdefghijklmnopqrstuvwxyz0123456789&baz=Patient/abcdefghijklmnopqrstuvwxyz0123456789";
|
||||
assertEquals(expected, outcome);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPerformIdSubstitutionsInMatchUrl_NonUrnSubstitution() {
|
||||
IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
|
||||
idSubstitutions.put(new IdType("Patient/123"), new IdType("Patient/456"));
|
||||
String input = "Patient?foo=Patient/123";
|
||||
String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(idSubstitutions, input);
|
||||
assertEquals(input, outcome);
|
||||
}
|
||||
|
||||
}
|
|
@ -2338,7 +2338,7 @@ public class JsonParserDstu3Test {
|
|||
ourCtx.newJsonParser().parseResource(Bundle.class, bundle);
|
||||
fail();
|
||||
} catch (DataFormatException e) {
|
||||
assertEquals("Failed to parse JSON encoded FHIR content: Unexpected close marker '}': expected ']' (for root starting at [Source: UNKNOWN; line: 1, column: 0])\n" +
|
||||
assertEquals("Failed to parse JSON encoded FHIR content: Unexpected close marker '}': expected ']' (for root starting at [Source: UNKNOWN; line: 1])\n" +
|
||||
" at [Source: UNKNOWN; line: 4, column: 3]", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -38,6 +38,7 @@ import org.hl7.fhir.r4.model.StringType;
|
|||
import org.hl7.fhir.r4.model.Type;
|
||||
import org.hl7.fhir.utilities.xhtml.XhtmlNode;
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -80,6 +81,11 @@ public class JsonParserR4Test extends BaseTest {
|
|||
return b;
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
public void afterEach() {
|
||||
ourCtx.getParserOptions().setAutoContainReferenceTargetsWithNoId(true);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEntitiesNotConverted() throws IOException {
|
||||
Device input = loadResource(ourCtx, Device.class, "/entities-from-cerner.json");
|
||||
|
@ -198,6 +204,25 @@ public class JsonParserR4Test extends BaseTest {
|
|||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testContainedResourcesNotAutoContainedWhenConfiguredNotToDoSo() {
|
||||
MedicationDispense md = new MedicationDispense();
|
||||
md.addIdentifier().setValue("DISPENSE");
|
||||
|
||||
Medication med = new Medication();
|
||||
med.getCode().setText("MED");
|
||||
md.setMedication(new Reference(med));
|
||||
|
||||
ourCtx.getParserOptions().setAutoContainReferenceTargetsWithNoId(false);
|
||||
String encoded = ourCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(md);
|
||||
assertEquals("{\"resourceType\":\"MedicationDispense\",\"identifier\":[{\"value\":\"DISPENSE\"}],\"medicationReference\":{}}", encoded);
|
||||
|
||||
ourCtx.getParserOptions().setAutoContainReferenceTargetsWithNoId(true);
|
||||
encoded = ourCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(md);
|
||||
assertEquals("{\"resourceType\":\"MedicationDispense\",\"contained\":[{\"resourceType\":\"Medication\",\"id\":\"1\",\"code\":{\"text\":\"MED\"}}],\"identifier\":[{\"value\":\"DISPENSE\"}],\"medicationReference\":{\"reference\":\"#1\"}}", encoded);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testParseBundleWithMultipleNestedContainedResources() throws Exception {
|
||||
String text = loadResource("/bundle-with-two-patient-resources.json");
|
||||
|
|
|
@ -28,6 +28,7 @@ import java.math.BigDecimal;
|
|||
import java.math.BigInteger;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -537,8 +538,8 @@ public class JsonLikeParserTest {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Set<String> keySet() {
|
||||
return nativeObject.keySet();
|
||||
public Iterator<String> keyIterator() {
|
||||
return nativeObject.keySet().iterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
pom.xml
|
@ -818,7 +818,7 @@
|
|||
<hibernate_validator_version>6.1.5.Final</hibernate_validator_version>
|
||||
<httpcore_version>4.4.13</httpcore_version>
|
||||
<httpclient_version>4.5.13</httpclient_version>
|
||||
<jackson_version>2.12.3</jackson_version>
|
||||
<jackson_version>2.13.0</jackson_version>
|
||||
<jackson_databind_version>${jackson_version}</jackson_databind_version>
|
||||
<maven_assembly_plugin_version>3.3.0</maven_assembly_plugin_version>
|
||||
<maven_license_plugin_version>1.8</maven_license_plugin_version>