Merge remote-tracking branch 'origin/master' into mdm-expansion-interceptor

commit ab143e34cb
@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@@ -24,6 +24,8 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.util.ParametersUtil;
 import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.IBaseParameters;
 import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -84,6 +86,7 @@ public interface IValidationSupport {
   * @param theValueSetToExpand The valueset that should be expanded
   * @return The expansion, or null
   */
+ @Nullable
  default ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, @Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) {
    return null;
  }

@@ -93,6 +96,7 @@ public interface IValidationSupport {
   * validation support module. This method may return null if it doesn't
   * make sense for a given module.
   */
+ @Nullable
  default List<IBaseResource> fetchAllConformanceResources() {
    return null;
  }

@@ -100,6 +104,7 @@ public interface IValidationSupport {
  /**
   * Load and return all possible structure definitions
   */
+ @Nullable
  default <T extends IBaseResource> List<T> fetchAllStructureDefinitions() {
    return null;
  }

@@ -110,6 +115,7 @@ public interface IValidationSupport {
   * @param theSystem The code system
   * @return The valueset (must not be null, but can be an empty ValueSet)
   */
+ @Nullable
  default IBaseResource fetchCodeSystem(String theSystem) {
    return null;
  }

@@ -128,6 +134,7 @@ public interface IValidationSupport {
   * given URI can be found
   */
  @SuppressWarnings("unchecked")
+ @Nullable
  default <T extends IBaseResource> T fetchResource(@Nullable Class<T> theClass, String theUri) {
    Validate.notBlank(theUri, "theUri must not be null or blank");

@@ -161,6 +168,7 @@ public interface IValidationSupport {
    return null;
  }

+ @Nullable
  default IBaseResource fetchStructureDefinition(String theUrl) {
    return null;
  }

@@ -182,6 +190,7 @@ public interface IValidationSupport {
  /**
   * Fetch the given ValueSet by URL
   */
+ @Nullable
  default IBaseResource fetchValueSet(String theValueSetUrl) {
    return null;
  }

@@ -199,6 +208,7 @@ public interface IValidationSupport {
   * @param theDisplay The display name, if it should also be validated
   * @return Returns a validation result object
   */
+ @Nullable
  default CodeValidationResult validateCode(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl) {
    return null;
  }

@@ -216,6 +226,7 @@ public interface IValidationSupport {
   * @param theValueSet The ValueSet to validate against. Must not be null, and must be a ValueSet resource.
   * @return Returns a validation result object, or <code>null</code> if this validation support module can not handle this kind of request
   */
+ @Nullable
  default CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) {
    return null;
  }

@@ -228,6 +239,7 @@ public interface IValidationSupport {
   * @param theSystem The CodeSystem URL
   * @param theCode The code
   */
+ @Nullable
  default LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode) {
    return null;
  }

@@ -251,6 +263,7 @@ public interface IValidationSupport {
   * other method in the support chain, so that they can be passed through the entire chain. Implementations of this interface may always safely ignore this parameter.
   * @return Returns null if this module does not know how to handle this request
   */
+ @Nullable
  default IBaseResource generateSnapshot(ValidationSupportContext theValidationSupportContext, IBaseResource theInput, String theUrl, String theWebUrl, String theProfileName) {
    return null;
  }

@@ -268,6 +281,14 @@ public interface IValidationSupport {
    // nothing
  }

+ /**
+  * Attempt to translate the given concept from one code system to another
+  */
+ @Nullable
+ default TranslateConceptResults translateConcept(TranslateCodeRequest theRequest) {
+   return null;
+ }
+
  enum IssueSeverity {
    /**

@@ -289,6 +310,7 @@ public interface IValidationSupport {
  }

  class ConceptDesignation {

    private String myLanguage;
    private String myUseSystem;
    private String myUseCode;
@@ -710,4 +732,62 @@ public interface IValidationSupport {
    }
  }

+ class TranslateCodeRequest {
+   private final String mySourceSystemUrl;
+   private final String mySourceCode;
+   private final String myTargetSystemUrl;
+   private final int myHashCode;
+
+   public TranslateCodeRequest(String theSourceSystemUrl, String theSourceCode, String theTargetSystemUrl) {
+     mySourceSystemUrl = theSourceSystemUrl;
+     mySourceCode = theSourceCode;
+     myTargetSystemUrl = theTargetSystemUrl;
+
+     myHashCode = new HashCodeBuilder(17, 37)
+       .append(mySourceSystemUrl)
+       .append(mySourceCode)
+       .append(myTargetSystemUrl)
+       .toHashCode();
+   }
+
+   @Override
+   public boolean equals(Object theO) {
+     if (this == theO) {
+       return true;
+     }
+
+     if (theO == null || getClass() != theO.getClass()) {
+       return false;
+     }
+
+     TranslateCodeRequest that = (TranslateCodeRequest) theO;
+
+     return new EqualsBuilder()
+       .append(mySourceSystemUrl, that.mySourceSystemUrl)
+       .append(mySourceCode, that.mySourceCode)
+       .append(myTargetSystemUrl, that.myTargetSystemUrl)
+       .isEquals();
+   }
+
+   @Override
+   public int hashCode() {
+     return myHashCode;
+   }
+
+   public String getSourceSystemUrl() {
+     return mySourceSystemUrl;
+   }
+
+   public String getSourceCode() {
+     return mySourceCode;
+   }
+
+   public String getTargetSystemUrl() {
+     return myTargetSystemUrl;
+   }
+ }
+
 }
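To make the interface changes above more concrete, here is a hedged sketch of a minimal validation support module that only answers `fetchCodeSystem()` and leaves every other default method (including the new `translateConcept()`) untouched. The class name, code-system URL, and concept are invented for illustration; the sketch assumes the R4 structures module is on the classpath.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.CodeSystem;

public class LocalLabCodeSupport implements IValidationSupport {

  private final FhirContext myFhirContext = FhirContext.forR4();

  @Override
  public FhirContext getFhirContext() {
    return myFhirContext;
  }

  @Override
  public IBaseResource fetchCodeSystem(String theSystem) {
    if (!"http://examplelabs.org".equals(theSystem)) {
      return null; // defer to the next module in the validation support chain
    }
    // Serve a tiny hard-coded CodeSystem for the proprietary lab vocabulary
    CodeSystem cs = new CodeSystem();
    cs.setUrl(theSystem);
    cs.addConcept().setCode("blood-glucose").setDisplay("Blood glucose");
    return cs;
  }
}
```

Such a module would normally be placed in a validation support chain alongside the built-in modules, so that any method it answers with `null` falls through to the next module.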
@@ -0,0 +1,154 @@
package ca.uhn.fhir.context.support;

/*
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (full boilerplate header elided)
 * #L%
 */

import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

public class TranslateConceptResult {
  private String mySystem;
  private String myCode;
  private String myDisplay;
  private String myEquivalence;
  private String myConceptMapUrl;
  private String myValueSet;
  private String mySystemVersion;

  /**
   * Constructor
   */
  public TranslateConceptResult() {
    super();
  }

  public String getSystem() {
    return mySystem;
  }

  public TranslateConceptResult setSystem(String theSystem) {
    mySystem = theSystem;
    return this;
  }

  @Override
  public String toString() {
    return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
      .append("system", mySystem)
      .append("code", myCode)
      .append("display", myDisplay)
      .append("equivalence", myEquivalence)
      .append("conceptMapUrl", myConceptMapUrl)
      .append("valueSet", myValueSet)
      .append("systemVersion", mySystemVersion)
      .toString();
  }

  public String getCode() {
    return myCode;
  }

  public TranslateConceptResult setCode(String theCode) {
    myCode = theCode;
    return this;
  }

  public String getDisplay() {
    return myDisplay;
  }

  public TranslateConceptResult setDisplay(String theDisplay) {
    myDisplay = theDisplay;
    return this;
  }

  public String getEquivalence() {
    return myEquivalence;
  }

  public TranslateConceptResult setEquivalence(String theEquivalence) {
    myEquivalence = theEquivalence;
    return this;
  }

  public String getSystemVersion() {
    return mySystemVersion;
  }

  public void setSystemVersion(String theSystemVersion) {
    mySystemVersion = theSystemVersion;
  }

  public String getValueSet() {
    return myValueSet;
  }

  public TranslateConceptResult setValueSet(String theValueSet) {
    myValueSet = theValueSet;
    return this;
  }

  public String getConceptMapUrl() {
    return myConceptMapUrl;
  }

  public TranslateConceptResult setConceptMapUrl(String theConceptMapUrl) {
    myConceptMapUrl = theConceptMapUrl;
    return this;
  }

  @Override
  public boolean equals(Object theO) {
    if (this == theO) {
      return true;
    }

    if (theO == null || getClass() != theO.getClass()) {
      return false;
    }

    TranslateConceptResult that = (TranslateConceptResult) theO;

    EqualsBuilder b = new EqualsBuilder();
    b.append(mySystem, that.mySystem);
    b.append(myCode, that.myCode);
    b.append(myDisplay, that.myDisplay);
    b.append(myEquivalence, that.myEquivalence);
    b.append(myConceptMapUrl, that.myConceptMapUrl);
    b.append(myValueSet, that.myValueSet);
    b.append(mySystemVersion, that.mySystemVersion);
    return b.isEquals();
  }

  @Override
  public int hashCode() {
    HashCodeBuilder b = new HashCodeBuilder(17, 37);
    b.append(mySystem);
    b.append(myCode);
    b.append(myDisplay);
    b.append(myEquivalence);
    b.append(myConceptMapUrl);
    b.append(myValueSet);
    b.append(mySystemVersion);
    return b.toHashCode();
  }
}
@@ -0,0 +1,94 @@
package ca.uhn.fhir.context.support;

/*
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (full boilerplate header elided)
 * #L%
 */

import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;

import java.util.ArrayList;
import java.util.List;

public class TranslateConceptResults {
  private List<TranslateConceptResult> myResults;
  private String myMessage;
  private boolean myResult;

  public TranslateConceptResults() {
    super();
    myResults = new ArrayList<>();
  }

  public List<TranslateConceptResult> getResults() {
    return myResults;
  }

  public void setResults(List<TranslateConceptResult> theResults) {
    myResults = theResults;
  }

  public String getMessage() {
    return myMessage;
  }

  public void setMessage(String theMessage) {
    myMessage = theMessage;
  }

  public boolean getResult() {
    return myResult;
  }

  public void setResult(boolean theMatched) {
    myResult = theMatched;
  }

  public int size() {
    return getResults().size();
  }

  public boolean isEmpty() {
    return getResults().isEmpty();
  }

  @Override
  public boolean equals(Object theO) {
    if (this == theO) {
      return true;
    }

    if (theO == null || getClass() != theO.getClass()) {
      return false;
    }

    TranslateConceptResults that = (TranslateConceptResults) theO;

    EqualsBuilder b = new EqualsBuilder();
    b.append(myResults, that.myResults);
    return b.isEquals();
  }

  @Override
  public int hashCode() {
    HashCodeBuilder b = new HashCodeBuilder(17, 37);
    b.append(myResults);
    return b.toHashCode();
  }
}
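A brief usage sketch for the two new DTOs above: a module that performs its own concept mapping could populate its `translateConcept()` response like this. The system, code, display, and ConceptMap URL are invented for illustration.

```java
// Hypothetical values for illustration only
TranslateConceptResult match = new TranslateConceptResult()
  .setSystem("http://loinc.org")
  .setCode("718-7")
  .setDisplay("Hemoglobin [Mass/volume] in Blood")
  .setEquivalence("equivalent")
  .setConceptMapUrl("http://example.org/fhir/ConceptMap/local-to-loinc");

TranslateConceptResults results = new TranslateConceptResults();
results.setResult(true);              // at least one match was found
results.setMessage("Matches found");
results.getResults().add(match);
```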
@@ -28,6 +28,7 @@ import org.hl7.fhir.instance.model.api.IBaseExtension;
 import org.hl7.fhir.instance.model.api.IBaseHasExtensions;

 import java.util.List;
+import java.util.function.Predicate;
 import java.util.stream.Collectors;

 /**

@@ -45,7 +46,7 @@ public class ExtensionUtil {
  */
 public static IBaseExtension<?, ?> getOrCreateExtension(IBase theBase, String theUrl) {
   IBaseHasExtensions baseHasExtensions = validateExtensionSupport(theBase);
-  IBaseExtension extension = getExtension(baseHasExtensions, theUrl);
+  IBaseExtension extension = getExtensionByUrl(baseHasExtensions, theUrl);
   if (extension == null) {
     extension = baseHasExtensions.addExtension();
     extension.setUrl(theUrl);

@@ -53,6 +54,34 @@ public class ExtensionUtil {
   return extension;
 }

+/**
+ * Returns a new empty extension.
+ *
+ * @param theBase Base resource to add the extension to
+ * @return Returns a new extension
+ * @throws IllegalArgumentException IllegalArgumentException is thrown in case the resource doesn't support extensions
+ */
+public static IBaseExtension<?, ?> addExtension(IBase theBase) {
+  return addExtension(theBase, null);
+}
+
+/**
+ * Returns an extension with the specified URL
+ *
+ * @param theBase Base resource to add the extension to
+ * @param theUrl URL for the extension
+ * @return Returns a new extension with the specified URL.
+ * @throws IllegalArgumentException IllegalArgumentException is thrown in case the resource doesn't support extensions
+ */
+public static IBaseExtension<?, ?> addExtension(IBase theBase, String theUrl) {
+  IBaseHasExtensions baseHasExtensions = validateExtensionSupport(theBase);
+  IBaseExtension extension = baseHasExtensions.addExtension();
+  if (theUrl != null) {
+    extension.setUrl(theUrl);
+  }
+  return extension;
+}
+
 private static IBaseHasExtensions validateExtensionSupport(IBase theBase) {
   if (!(theBase instanceof IBaseHasExtensions)) {
     throw new IllegalArgumentException(String.format("Expected instance that supports extensions, but got %s", theBase));

@@ -75,7 +104,7 @@ public class ExtensionUtil {
     return false;
   }

-  return getExtension(baseHasExtensions, theExtensionUrl) != null;
+  return getExtensionByUrl(baseHasExtensions, theExtensionUrl) != null;
 }

 /**

@@ -89,7 +118,7 @@ public class ExtensionUtil {
   if (!hasExtension(theBase, theExtensionUrl)) {
     return false;
   }
-  IBaseDatatype value = getExtension((IBaseHasExtensions) theBase, theExtensionUrl).getValue();
+  IBaseDatatype value = getExtensionByUrl((IBaseHasExtensions) theBase, theExtensionUrl).getValue();
   if (value == null) {
     return theExtensionValue == null;
   }

@@ -103,14 +132,71 @@ public class ExtensionUtil {
  * @param theExtensionUrl URL of the extension to get. Must be non-null
  * @return Returns the first available extension with the specified URL, or null if such extension doesn't exist
  */
-public static IBaseExtension<?, ?> getExtension(IBaseHasExtensions theBase, String theExtensionUrl) {
-  return theBase.getExtension()
+public static IBaseExtension<?, ?> getExtensionByUrl(IBase theBase, String theExtensionUrl) {
+  Predicate<IBaseExtension> filter;
+  if (theExtensionUrl == null) {
+    filter = (e -> true);
+  } else {
+    filter = (e -> theExtensionUrl.equals(e.getUrl()));
+  }
+
+  return getExtensionsMatchingPredicate(theBase, filter)
     .stream()
-    .filter(e -> theExtensionUrl.equals(e.getUrl()))
     .findFirst()
     .orElse(null);
 }

+/**
+ * Gets all extensions that match the specified filter predicate
+ *
+ * @param theBase The resource to get the extension for
+ * @param theFilter Predicate to match the extension against
+ * @return Returns all extension with the specified URL, or an empty list if such extensions do not exist
+ */
+public static List<IBaseExtension<?, ?>> getExtensionsMatchingPredicate(IBase theBase, Predicate<? super IBaseExtension> theFilter) {
+  return validateExtensionSupport(theBase)
+    .getExtension()
+    .stream()
+    .filter(theFilter)
+    .collect(Collectors.toList());
+}
+
+/**
+ * Removes all extensions.
+ *
+ * @param theBase The resource to clear the extension for
+ * @return Returns all extension that were removed
+ */
+public static List<IBaseExtension<?, ?>> clearAllExtensions(IBase theBase) {
+  return clearExtensionsMatchingPredicate(theBase, (e -> true));
+}
+
+/**
+ * Removes all extensions by URL.
+ *
+ * @param theBase The resource to clear the extension for
+ * @param theUrl The url to clear extensions for
+ * @return Returns all extension that were removed
+ */
+public static List<IBaseExtension<?, ?>> clearExtensionsByUrl(IBase theBase, String theUrl) {
+  return clearExtensionsMatchingPredicate(theBase, (e -> theUrl.equals(e.getUrl())));
+}
+
+/**
+ * Removes all extensions that match the specified predicate
+ *
+ * @param theBase The base object to clear the extension for
+ * @param theFilter Defines which extensions should be cleared
+ * @return Returns all extension that were removed
+ */
+private static List<IBaseExtension<?, ?>> clearExtensionsMatchingPredicate(IBase theBase, Predicate<? super IBaseExtension> theFilter) {
+  List<IBaseExtension<?, ?>> retVal = getExtensionsMatchingPredicate(theBase, theFilter);
+  validateExtensionSupport(theBase)
+    .getExtension()
+    .removeIf(theFilter);
+  return retVal;
+}
+
 /**
  * Gets all extensions with the specified URL
  *
@@ -118,11 +204,9 @@ public class ExtensionUtil {
  * @param theExtensionUrl URL of the extension to get. Must be non-null
  * @return Returns all extension with the specified URL, or an empty list if such extensions do not exist
  */
-public static List<IBaseExtension<?, ?>> getExtensions(IBaseHasExtensions theBase, String theExtensionUrl) {
-  return theBase.getExtension()
-    .stream()
-    .filter(e -> theExtensionUrl.equals(e.getUrl()))
-    .collect(Collectors.toList());
+public static List<IBaseExtension<?, ?>> getExtensionsByUrl(IBaseHasExtensions theBase, String theExtensionUrl) {
+  Predicate<IBaseExtension> urlEqualityPredicate = e -> theExtensionUrl.equals(e.getUrl());
+  return getExtensionsMatchingPredicate(theBase, urlEqualityPredicate);
 }

 /**

@@ -133,7 +217,7 @@ public class ExtensionUtil {
  * @param theFhirContext The context containing FHIR resource definitions
  */
 public static void setExtension(FhirContext theFhirContext, IBaseExtension theExtension, String theValue) {
-  setExtension(theFhirContext, theExtension, "string", theValue);
+  setExtension(theFhirContext, theExtension, "string", (Object) theValue);
 }

 /**

@@ -144,7 +228,7 @@ public class ExtensionUtil {
  * @param theValue The value to set
  * @param theFhirContext The context containing FHIR resource definitions
  */
-public static void setExtension(FhirContext theFhirContext, IBaseExtension theExtension, String theExtensionType, String theValue) {
+public static void setExtension(FhirContext theFhirContext, IBaseExtension theExtension, String theExtensionType, Object theValue) {
   theExtension.setValue(TerserUtil.newElement(theFhirContext, theExtensionType, theValue));
 }

@@ -156,7 +240,7 @@ public class ExtensionUtil {
  * @param theValue Extension value
  * @param theFhirContext The context containing FHIR resource definitions
  */
-public static void setExtension(FhirContext theFhirContext, IBase theBase, String theUrl, String theValue) {
+public static void setExtensionAsString(FhirContext theFhirContext, IBase theBase, String theUrl, String theValue) {
   IBaseExtension ext = getOrCreateExtension(theBase, theUrl);
   setExtension(theFhirContext, ext, theValue);
 }

@@ -170,9 +254,19 @@ public class ExtensionUtil {
  * @param theValue Extension value
  * @param theFhirContext The context containing FHIR resource definitions
  */
-public static void setExtension(FhirContext theFhirContext, IBase theBase, String theUrl, String theValueType, String theValue) {
+public static void setExtension(FhirContext theFhirContext, IBase theBase, String theUrl, String theValueType, Object theValue) {
   IBaseExtension ext = getOrCreateExtension(theBase, theUrl);
-  setExtension(theFhirContext, ext, theValue);
+  setExtension(theFhirContext, ext, theValueType, theValue);
 }

+/**
+ * Compares two extensions, returns true if they have the same value and url
+ *
+ * @param theLeftExtension : Extension to be evaluated #1
+ * @param theRightExtension : Extension to be evaluated #2
+ * @return Result of the comparison
+ */
+public static boolean equals(IBaseExtension theLeftExtension, IBaseExtension theRightExtension) {
+  return TerserUtil.equals(theLeftExtension, theRightExtension);
+}
 }
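A short usage sketch for the renamed and added helpers above. The extension URL is invented and the R4 structures module is assumed to be available.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.ExtensionUtil;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.r4.model.Patient;

public class ExtensionUtilExample {
  public static void main(String[] args) {
    FhirContext ctx = FhirContext.forR4();
    Patient patient = new Patient();
    String url = "http://example.org/fhir/StructureDefinition/eye-colour"; // invented URL

    // Create (or reuse) an extension with this URL and give it a string value
    ExtensionUtil.setExtensionAsString(ctx, patient, url, "blue");

    // Look the extension up again by URL
    IBaseExtension<?, ?> extension = ExtensionUtil.getExtensionByUrl(patient, url);
    System.out.println(extension.getValue());

    // Remove every extension carrying that URL
    ExtensionUtil.clearExtensionsByUrl(patient, url);
  }
}
```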
@@ -112,6 +112,11 @@ public class HapiExtensions {
  public static final String EXT_RESOURCE_PLACEHOLDER = "http://hapifhir.io/fhir/StructureDefinition/resource-placeholder";

+ /**
+  * URL for extension in a Group Bulk Export which identifies the golden patient of a given exported resource.
+  */
+ public static final String ASSOCIATED_GOLDEN_RESOURCE_EXTENSION_URL = "https://hapifhir.org/associated-patient-golden-resource/";
+
  /**
   * Non instantiable
   */
  private HapiExtensions() {

@@ -32,7 +32,7 @@ import java.util.List;
 * THIS API IS EXPERIMENTAL IN HAPI FHIR - USE WITH CAUTION AS THE PUBLISHED API MAY
 * CHANGE
 *
-* @see FhirTerser#visit(IBaseResource, IModelVisitor2)
+* @see FhirTerser#visit(IBase, IModelVisitor2)
 */
public interface IModelVisitor2 {
@@ -23,6 +23,7 @@ package ca.uhn.fhir.util;
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -31,6 +32,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

public class SearchParameterUtil {

@@ -50,6 +52,62 @@ public class SearchParameterUtil {
   return retVal;
 }

+/**
+ * Given the resource type, fetch its patient-based search parameter name:
+ * 1. Attempt to find one called 'patient'
+ * 2. If that fails, find one called 'subject'
+ * 3. If that fails, find by Patient compartment.
+ *    3.1 If that returns more than one result, throw an error
+ *    3.2 If that returns one result, return it
+ */
+public static Optional<RuntimeSearchParam> getOnlyPatientSearchParamForResourceType(FhirContext theFhirContext, String theResourceType) {
+  RuntimeSearchParam myPatientSearchParam = null;
+  RuntimeResourceDefinition runtimeResourceDefinition = theFhirContext.getResourceDefinition(theResourceType);
+  myPatientSearchParam = runtimeResourceDefinition.getSearchParam("patient");
+  if (myPatientSearchParam == null) {
+    myPatientSearchParam = runtimeResourceDefinition.getSearchParam("subject");
+    if (myPatientSearchParam == null) {
+      myPatientSearchParam = getOnlyPatientCompartmentRuntimeSearchParam(runtimeResourceDefinition);
+    }
+  }
+  return Optional.ofNullable(myPatientSearchParam);
+}
+
+/**
+ * Search the resource definition for a compartment named 'patient' and return its related Search Parameter.
+ */
+public static RuntimeSearchParam getOnlyPatientCompartmentRuntimeSearchParam(FhirContext theFhirContext, String theResourceType) {
+  RuntimeResourceDefinition resourceDefinition = theFhirContext.getResourceDefinition(theResourceType);
+  return getOnlyPatientCompartmentRuntimeSearchParam(resourceDefinition);
+}
+
+public static RuntimeSearchParam getOnlyPatientCompartmentRuntimeSearchParam(RuntimeResourceDefinition runtimeResourceDefinition) {
+  RuntimeSearchParam patientSearchParam;
+  List<RuntimeSearchParam> searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient");
+  if (searchParams == null || searchParams.size() == 0) {
+    String errorMessage = String.format("Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", runtimeResourceDefinition.getId());
+    throw new IllegalArgumentException(errorMessage);
+  } else if (searchParams.size() == 1) {
+    patientSearchParam = searchParams.get(0);
+  } else {
+    String errorMessage = String.format("Resource type %s has more than one Search Param which references a patient compartment. We are unable to disambiguate which patient search parameter we should be searching by.", runtimeResourceDefinition.getId());
+    throw new IllegalArgumentException(errorMessage);
+  }
+  return patientSearchParam;
+}
+
+public static List<RuntimeSearchParam> getAllPatientCompartmentRuntimeSearchParams(FhirContext theFhirContext, String theResourceType) {
+  RuntimeResourceDefinition runtimeResourceDefinition = theFhirContext.getResourceDefinition(theResourceType);
+  return getAllPatientCompartmentRuntimeSearchParams(runtimeResourceDefinition);
+}
+
+private static List<RuntimeSearchParam> getAllPatientCompartmentRuntimeSearchParams(RuntimeResourceDefinition theRuntimeResourceDefinition) {
+  List<RuntimeSearchParam> patient = theRuntimeResourceDefinition.getSearchParamsForCompartmentName("Patient");
+  return patient;
+}
+
 @Nullable
 public static String getCode(FhirContext theContext, IBaseResource theResource) {
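A quick usage sketch for the new lookup helper above; the resource type is chosen arbitrarily for illustration.

```java
FhirContext ctx = FhirContext.forR4();
Optional<RuntimeSearchParam> patientParam =
  SearchParameterUtil.getOnlyPatientSearchParamForResourceType(ctx, "Observation");

// For Observation this resolves the "patient" search parameter first,
// falling back to "subject" and then to the Patient compartment.
patientParam.ifPresent(param -> System.out.println("Patient search parameter: " + param.getName()));
```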
@@ -46,6 +46,8 @@ public final class TerserUtil {
  public static final String FIELD_NAME_IDENTIFIER = "identifier";

+ private static final String EQUALS_DEEP = "equalsDeep";
+
  public static final Collection<String> IDS_AND_META_EXCLUDES =
    Collections.unmodifiableSet(Stream.of("id", "identifier", "meta").collect(Collectors.toSet()));

@@ -97,12 +99,12 @@ public final class TerserUtil {
  }

  /**
-  * get the Values of a specified field.
+  * Gets all values of the specified field.
   *
   * @param theFhirContext Context holding resource definition
   * @param theResource Resource to check if the specified field is set
   * @param theFieldName name of the field to check
-  * @return Returns true if field exists and has any values set, and false otherwise
+  * @return Returns all values for the specified field or null if field with the provided name doesn't exist
   */
  public static List<IBase> getValues(FhirContext theFhirContext, IBaseResource theResource, String theFieldName) {
    RuntimeResourceDefinition resourceDefinition = theFhirContext.getResourceDefinition(theResource);

@@ -114,6 +116,23 @@ public final class TerserUtil {
    return resourceIdentifier.getAccessor().getValues(theResource);
  }

+ /**
+  * Gets the first available value for the specified field.
+  *
+  * @param theFhirContext Context holding resource definition
+  * @param theResource Resource to check if the specified field is set
+  * @param theFieldName name of the field to check
+  * @return Returns the first value for the specified field or null if field with the provided name doesn't exist or
+  * has no values
+  */
+ public static IBase getValueFirstRep(FhirContext theFhirContext, IBaseResource theResource, String theFieldName) {
+   List<IBase> values = getValues(theFhirContext, theResource, theFieldName);
+   if (values == null || values.isEmpty()) {
+     return null;
+   }
+   return values.get(0);
+ }
+
  /**
   * Clones specified composite field (collection). Composite field values must conform to the collections
   * contract.

@@ -157,26 +176,50 @@ public final class TerserUtil {
    });
  }

- private static boolean contains(IBase theItem, List<IBase> theItems) {
+ private static Method getMethod(IBase theBase, String theMethodName) {
    Method method = null;
-   for (Method m : theItem.getClass().getDeclaredMethods()) {
-     if (m.getName().equals("equalsDeep")) {
+   for (Method m : theBase.getClass().getDeclaredMethods()) {
+     if (m.getName().equals(theMethodName)) {
        method = m;
        break;
      }
    }
+   return method;
+ }

-   final Method m = method;
-   return theItems.stream().anyMatch(i -> {
-     if (m != null) {
-       try {
-         return (Boolean) m.invoke(theItem, i);
-       } catch (Exception e) {
-         throw new RuntimeException("Unable to compare equality via equalsDeep", e);
-       }
+ /**
+  * Checks if two items are equal via {@link #EQUALS_DEEP} method
+  *
+  * @param theItem1 First item to compare
+  * @param theItem2 Second item to compare
+  * @return Returns true if they are equal and false otherwise
+  */
+ public static boolean equals(IBase theItem1, IBase theItem2) {
+   if (theItem1 == null) {
+     return theItem2 == null;
+   }
+
+   final Method method = getMethod(theItem1, EQUALS_DEEP);
+   if (method == null) {
+     throw new IllegalArgumentException(String.format("Instance %s do not provide %s method", theItem1, EQUALS_DEEP));
+   }
+   return equals(theItem1, theItem2, method);
+ }
+
+ private static boolean equals(IBase theItem1, IBase theItem2, Method theMethod) {
+   if (theMethod != null) {
+     try {
+       return (Boolean) theMethod.invoke(theItem1, theItem2);
+     } catch (Exception e) {
+       throw new RuntimeException(String.format("Unable to compare equality via %s", EQUALS_DEEP), e);
+     }
-     return theItem.equals(i);
-   });
- }
+   return theItem1.equals(theItem2);
+ }

+ private static boolean contains(IBase theItem, List<IBase> theItems) {
+   final Method method = getMethod(theItem, EQUALS_DEEP);
+   return theItems.stream().anyMatch(i -> equals(i, theItem, method));
+ }

  /**
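A minimal sketch of the new deep-equality helper introduced above (R4 model assumed, values invented). It delegates to the structures' own `equalsDeep()` via reflection rather than relying on object identity.

```java
HumanName name1 = new HumanName().setFamily("Simpson").addGiven("Homer");
HumanName name2 = new HumanName().setFamily("Simpson").addGiven("Homer");

// Deep structural comparison via equalsDeep()
boolean same = TerserUtil.equals(name1, name2);   // true

// Default Object equality compares references only
boolean identity = name1.equals(name2);           // false
```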
@@ -256,6 +299,20 @@ public final class TerserUtil {
    childDefinition.getAccessor().getValues(theResource).clear();
  }

+ /**
+  * Sets the provided field with the given values. This method will add to the collection of existing field values
+  * in case of multiple cardinality. Use {@link #clearField(FhirContext, FhirTerser, String, IBaseResource, IBase...)}
+  * to remove values before setting
+  *
+  * @param theFhirContext Context holding resource definition
+  * @param theFieldName Child field name of the resource to set
+  * @param theResource The resource to set the values on
+  * @param theValues The values to set on the resource child field name
+  */
+ public static void setField(FhirContext theFhirContext, String theFieldName, IBaseResource theResource, IBase... theValues) {
+   setField(theFhirContext, theFhirContext.newTerser(), theFieldName, theResource, theValues);
+ }
+
  /**
   * Sets the provided field with the given values. This method will add to the collection of existing field values
   * in case of multiple cardinality. Use {@link #clearField(FhirContext, FhirTerser, String, IBaseResource, IBase...)}

@@ -269,10 +326,20 @@ public final class TerserUtil {
   */
  public static void setField(FhirContext theFhirContext, FhirTerser theTerser, String theFieldName, IBaseResource theResource, IBase... theValues) {
    BaseRuntimeChildDefinition childDefinition = getBaseRuntimeChildDefinition(theFhirContext, theFieldName, theResource);

    List<IBase> theFromFieldValues = childDefinition.getAccessor().getValues(theResource);
+   if (theFromFieldValues.isEmpty()) {
+     for (IBase value : theValues) {
+       try {
+         childDefinition.getMutator().addValue(theResource, value);
+       } catch (UnsupportedOperationException e) {
+         ourLog.warn("Resource {} does not support multiple values, but an attempt to set {} was made. Setting the first item only", theResource, theValues);
+         childDefinition.getMutator().setValue(theResource, value);
+         break;
+       }
+     }
+     return;
+   }
    List<IBase> theToFieldValues = Arrays.asList(theValues);

    mergeFields(theTerser, theResource, childDefinition, theFromFieldValues, theToFieldValues);
  }
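A small usage sketch for the new `setField` convenience overload above; the identifier system and value are invented.

```java
FhirContext ctx = FhirContext.forR4();
Patient patient = new Patient();

Identifier identifier = new Identifier()
  .setSystem("http://example.org/mrn")   // invented system
  .setValue("12345");

// Appends the identifier to the "identifier" child of the resource,
// creating a terser internally so callers don't have to
TerserUtil.setField(ctx, "identifier", patient, identifier);
```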
@@ -303,6 +370,18 @@ public final class TerserUtil {
    setFieldByFhirPath(theFhirContext.newTerser(), theFhirPath, theResource, theValue);
  }

+ public static List<IBase> getFieldByFhirPath(FhirContext theFhirContext, String theFhirPath, IBase theResource) {
+   return theFhirContext.newTerser().getValues(theResource, theFhirPath, false, false);
+ }
+
+ public static IBase getFirstFieldByFhirPath(FhirContext theFhirContext, String theFhirPath, IBase theResource) {
+   List<IBase> values = getFieldByFhirPath(theFhirContext, theFhirPath, theResource);
+   if (values == null || values.isEmpty()) {
+     return null;
+   }
+   return values.get(0);
+ }
+
  private static void replaceField(IBaseResource theFrom, IBaseResource theTo, BaseRuntimeChildDefinition childDefinition) {
    childDefinition.getAccessor().getFirstValueOrNull(theFrom).ifPresent(v -> {
      childDefinition.getMutator().setValue(theTo, v);

@@ -448,6 +527,9 @@ public final class TerserUtil {
   */
  public static <T extends IBase> T newElement(FhirContext theFhirContext, String theElementType, Object theConstructorParam) {
    BaseRuntimeElementDefinition def = theFhirContext.getElementDefinition(theElementType);
+   if (def == null) {
+     throw new IllegalArgumentException(String.format("Unable to find element type definition for %s", theElementType));
+   }
    return (T) def.newInstance(theConstructorParam);
  }
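A brief sketch of the new FHIRPath-based getters above (R4 model assumed, data invented):

```java
FhirContext ctx = FhirContext.forR4();
Patient patient = new Patient();
patient.addName().setFamily("Simpson").addGiven("Homer");

// Fetch values by FHIRPath expression rather than by child name
List<IBase> givenNames = TerserUtil.getFieldByFhirPath(ctx, "name.given", patient);
IBase firstGiven = TerserUtil.getFirstFieldByFhirPath(ctx, "name.given", patient);
```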
@@ -108,15 +108,50 @@ public class TerserUtilHelper {
  }

  /**
-  * Gets values of the specified field.
+  * Gets values for the specified child field.
   *
   * @param theField The field to get values from
-  * @return Returns a collection of values containing values or null if the spefied field doesn't exist
+  * @return Returns a list of retrieved values or null if the specified field doesn't exist
   */
  public List<IBase> getFieldValues(String theField) {
    return TerserUtil.getValues(myContext, myResource, theField);
  }

+ /**
+  * Gets values for the specified FHIRPath expression.
+  *
+  * @param theFhirPath The FHIR path expression to get the values from
+  * @return Returns a collection of values or null if the specified field doesn't exist
+  */
+ public List<IBase> getFieldValuesByFhirPath(String theFhirPath) {
+   return TerserUtil.getFieldByFhirPath(myContext, theFhirPath, myResource);
+ }
+
+ /**
+  * Gets the first available value for the specified FHIRPath expression.
+  *
+  * @param theFhirPath The FHIR path expression to get the values from
+  * @return Returns the value or null if the specified field doesn't exist or is empty
+  */
+ public IBase getFieldValueByFhirPath(String theFhirPath) {
+   return TerserUtil.getFirstFieldByFhirPath(myContext, theFhirPath, myResource);
+ }
+
+ /**
+  * Gets the first available value of the specified field.
+  *
+  * @param theField The field to get values from
+  * @return Returns the first available value for the field name or null if the
+  * specified field doesn't exist or has no values
+  */
+ public IBase getFieldValue(String theField) {
+   List<IBase> values = getFieldValues(theField);
+   if (values == null || values.isEmpty()) {
+     return null;
+   }
+   return values.get(0);
+ }
+
  /**
   * Gets the terser instance, creating one if necessary.
   *
@@ -131,12 +131,9 @@ ca.uhn.fhir.jpa.dao.LegacySearchBuilder.sourceParamDisabled=The _source paramete
 ca.uhn.fhir.jpa.dao.LegacySearchBuilder.invalidCodeMissingSystem=Invalid token specified for parameter {0} - No system specified: {1}|{2}
 ca.uhn.fhir.jpa.dao.LegacySearchBuilder.invalidCodeMissingCode=Invalid token specified for parameter {0} - No code specified: {1}|{2}

-ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.matchesFound=Matches found!
-ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.noMatchesFound=No matches found!
-ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.matchesFound=Matches found!
-ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.noMatchesFound=No matches found!
-ca.uhn.fhir.jpa.dao.r5.FhirResourceDaoConceptMapR5.matchesFound=Matches found!
-ca.uhn.fhir.jpa.dao.r5.FhirResourceDaoConceptMapR5.noMatchesFound=No matches found!
+ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl.matchesFound=Matches found
+ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl.noMatchesFound=No Matches found

 ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoSearchParameterR4.invalidSearchParamExpression=The expression "{0}" can not be evaluated and may be invalid: {1}

 ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderToken.textModifierDisabledForSearchParam=The :text modifier is disabled for this search parameter
@@ -3,14 +3,14 @@
  <modelVersion>4.0.0</modelVersion>
  <groupId>ca.uhn.hapi.fhir</groupId>
  <artifactId>hapi-fhir-bom</artifactId>
- <version>5.4.0-PRE1-SNAPSHOT</version>
+ <version>5.4.0-PRE2-SNAPSHOT</version>
  <packaging>pom</packaging>
  <name>HAPI FHIR BOM</name>

  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-cli</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../../hapi-deployable-pom</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@@ -20,11 +20,12 @@ package ca.uhn.fhir.rest.client.interceptor;
 * #L%
 */

-import org.apache.commons.lang3.Validate;
-
 import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.client.api.*;
+import ca.uhn.fhir.rest.client.api.IClientInterceptor;
+import ca.uhn.fhir.rest.client.api.IHttpRequest;
+import ca.uhn.fhir.rest.client.api.IHttpResponse;
 import ca.uhn.fhir.util.CoverageIgnore;
+import org.apache.commons.lang3.Validate;

 /**
  * HTTP interceptor to be used for adding HTTP Authorization using "bearer tokens" to requests. Bearer tokens are used for protocols such as OAUTH2 (see the

@@ -57,7 +58,7 @@ public class BearerTokenAuthInterceptor implements IClientInterceptor {
  * The bearer token to use (must not be null)
  */
 public BearerTokenAuthInterceptor(String theToken) {
-  Validate.notNull("theToken must not be null");
+  Validate.notNull(theToken, "theToken must not be null");
   myToken = theToken;
 }

@@ -82,6 +83,7 @@ public class BearerTokenAuthInterceptor implements IClientInterceptor {
  * Sets the bearer token to use
  */
 public void setToken(String theToken) {
+  Validate.notNull(theToken, "theToken must not be null");
   myToken = theToken;
 }
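For context, registering this interceptor on a client looks roughly like the following; the server URL and token are placeholders.

```java
FhirContext ctx = FhirContext.forR4();
IGenericClient client = ctx.newRestfulGenericClient("http://example.org/fhir");

// Adds "Authorization: Bearer <token>" to every request sent by this client
BearerTokenAuthInterceptor authInterceptor = new BearerTokenAuthInterceptor("my-access-token");
client.registerInterceptor(authInterceptor);
```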
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -78,13 +78,13 @@
  <dependency>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-structures-dstu2</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <scope>compile</scope>
  </dependency>
  <dependency>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-jpaserver-subscription</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <scope>compile</scope>
  </dependency>
  <dependency>

@@ -101,7 +101,7 @@
  <dependency>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-testpage-overlay</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <classifier>classes</classifier>
  </dependency>
  <dependency>
@@ -21,6 +21,7 @@ package ca.uhn.hapi.fhir.docs;
 */

 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.rest.server.RestfulServer;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.interceptor.*;

@@ -223,4 +224,25 @@ public class ServletExamples {
  }
  // END SNIPPET: corsInterceptor

+ @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server")
+ public class RestfulServerWithResponseTerminologyTranslationInterceptor extends RestfulServer {
+
+   private IValidationSupport myValidationSupport;
+
+   @Override
+   protected void initialize() throws ServletException {
+     // START SNIPPET: ResponseTerminologyTranslationInterceptor
+
+     // Create an interceptor that will map from a proprietary CodeSystem to LOINC
+     ResponseTerminologyTranslationInterceptor interceptor = new ResponseTerminologyTranslationInterceptor(myValidationSupport);
+     interceptor.addMappingSpecification("http://examplelabs.org", "http://loinc.org");
+
+     // Register the interceptor
+     registerInterceptor(interceptor);
+
+     // END SNIPPET: ResponseTerminologyTranslationInterceptor
+   }
+ }
+
 }
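A companion sketch for the second new interceptor mentioned in the changelog entries below. It assumes that ResponseTerminologyDisplayPopulationInterceptor is constructed from an IValidationSupport in the same way as its translation counterpart above; this is an assumption, not shown in this diff.

```java
// Hedged sketch - constructor argument assumed by analogy with the interceptor above
ResponseTerminologyDisplayPopulationInterceptor displayInterceptor =
    new ResponseTerminologyDisplayPopulationInterceptor(myValidationSupport);
registerInterceptor(displayInterceptor);
```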
@@ -0,0 +1,5 @@
---
type: security
issue: 2194
title: "The Testpage Overlay now suppresses authorization headers from the output headers. Thanks
  to Tuomo Ala-Vannesluoma for the pull request!"

@@ -0,0 +1,4 @@
---
type: add
issue: 2478
title: "Added matching based on extensions. When given the path to a FHIR resource, the matcher will compare the extensions of the two resources and report a match if the URL and string value are the same."

@@ -0,0 +1,5 @@
---
type: add
issue: 2488
title: "Two new server interceptors have been added that can be used to map codes and populate code display names respectively
  using the server terminology services."

@@ -0,0 +1,5 @@
---
type: fix
issue: 2505
title: "An incorrect path caused the select2 library to fail to load in the HAPI FHIR testpage overlay
  module. Thanks to Ari Ruotsalainen for reporting!"
@@ -193,6 +193,54 @@ If you wish to override the value of `Resource.meta.source` using the value supp
* [CaptureResourceSourceFromHeaderInterceptor JavaDoc](/apidocs/hapi-fhir-server/ca/uhn/fhir/rest/server/interceptor/CaptureResourceSourceFromHeaderInterceptor.html)
* [CaptureResourceSourceFromHeaderInterceptor Source](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/CaptureResourceSourceFromHeaderInterceptor.java)

# Terminology: Map Response Terminology

A common problem when implementing FHIR APIs is the challenge of how to return coded data using standard vocabularies when your source data is not modelled using these vocabularies. For example, suppose you want to implement support for an Implementation Guide that mandates the use of [LOINC](https://loinc.org) but your source data uses local/proprietary observation codes.

One solution is to simply apply mappings and add them to the FHIR data you are storing in your repository as you are storing it. This solution, often called *Mapping on the Way In*, will work but it has potential pitfalls including:

* All mappings must be known at the time the data is being stored.
* If mappings change because of mistakes or new information, updating existing data is difficult.

A potentially better solution is to apply *Mapping on the Way Out*, meaning that your mappings are stored in a central spot and applied at runtime to data as it is leaving your system. HAPI FHIR supplies an interceptor called the ResponseTerminologyTranslationInterceptor that can help with this.

* [ResponseTerminologyTranslationInterceptor JavaDoc](/apidocs/hapi-fhir-server/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyTranslationInterceptor.html)
* [ResponseTerminologyTranslationInterceptor Source](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyTranslationInterceptor.java)

This interceptor uses ConceptMap resources that are stored in your system, looking up mappings for CodeableConcept codings in your resources and adding them to the responses.

The following code snippet shows a simple example of how to create and configure this interceptor.

```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ServletExamples.java|ResponseTerminologyTranslationInterceptor}}
```
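Because the interceptor drives its mappings from ConceptMap resources stored in the repository, a mapping has to exist before anything is translated. The following is a hedged sketch (R4 model, invented proprietary code and URL; not taken from the HAPI documentation) of what such a ConceptMap might look like when built in Java:

```java
ConceptMap conceptMap = new ConceptMap();
conceptMap.setUrl("http://example.org/fhir/ConceptMap/local-to-loinc"); // invented URL
conceptMap.setStatus(Enumerations.PublicationStatus.ACTIVE);

// One group maps the proprietary lab system onto LOINC
ConceptMap.ConceptMapGroupComponent group = conceptMap.addGroup();
group.setSource("http://examplelabs.org");
group.setTarget("http://loinc.org");

ConceptMap.SourceElementComponent element = group.addElement();
element.setCode("blood-glucose"); // invented proprietary code
element.addTarget()
  .setCode("2339-0")
  .setDisplay("Glucose [Mass/volume] in Blood")
  .setEquivalence(Enumerations.ConceptMapEquivalence.EQUIVALENT);

// Store the ConceptMap in the repository so the interceptor can find it, e.g.:
// client.update().resource(conceptMap).execute();
```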
## Limitations

The following limitations will hopefully be resolved in the future:

This interceptor currently only works when registered against a RestfulServer backed by the HAPI FHIR JPA server.

This interceptor only modifies responses to FHIR read/vread/search/history operations. Responses to these operations are not modified if they are found within a FHIR transaction operation.

# Terminology: Populate Code Display Names

The HAPI FHIR ResponseTerminologyDisplayPopulationInterceptor interceptor looks for Coding elements within responses where the `Coding.system` and `Coding.code` values are populated but the `Coding.display` is not. The interceptor will attempt to resolve the correct display using the validation support module and will add it to the Coding display value if one is found.

* [ResponseTerminologyDisplayPopulationInterceptor JavaDoc](/apidocs/hapi-fhir-server/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyDisplayPopulationInterceptor.html)
* [ResponseTerminologyDisplayPopulationInterceptor Source](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyDisplayPopulationInterceptor.java)

This interceptor resolves display names using the validation support module registered with the server, and adds them to the codings in your responses.

## Limitations

The following limitation will hopefully be resolved in the future:

This interceptor only modifies responses to FHIR read/vread/search/history operations. Responses to these operations are not modified if they are found within a FHIR transaction operation.

# Utility: ResponseSizeCapturingInterceptor

The ResponseSizeCapturingInterceptor can be used to capture the number of characters written in each HTTP FHIR response.
@@ -229,6 +277,8 @@ The UserRequestRetryVersionConflictsInterceptor allows clients to request that t
The RepositoryValidatingInterceptor can be used to enforce validation rules on data stored in a HAPI FHIR JPA Repository. See [Repository Validating Interceptor](/docs/validation/repository_validating_interceptor.html) for more information.

# Data Standardization

```StandardizingInterceptor``` handles data standardization (s13n) requirements. This interceptor applies standardization rules on all FHIR primitives as configured in the ```s13n.json``` file that should be made available on the classpath. This file contains FHIRPath definitions together with the standardizers that should be applied to that path. It comes with six pre-built standardizers: NAME_FAMILY, NAME_GIVEN, EMAIL, TITLE, PHONE and TEXT. Custom standardizers can be developed by implementing the ```ca.uhn.fhir.rest.server.interceptor.s13n.standardizers.IStandardizer``` interface.
@@ -403,6 +403,14 @@ The following algorithms are currently supported:
   </td>
   <td>If an optional "identifierSystem" is provided, then the identifiers only match when they belong to that system</td>
</tr>
<tr>
   <td>EXTENSION_ANY_ORDER</td>
   <td>matcher</td>
   <td>
      Matches extensions of resources in any order. Matches are made if both resources share at least one extension with the same URL and value.
   </td>
   <td></td>
</tr>
<tr>
   <td>EMPTY_FIELD</td>
   <td>matcher</td>
@@ -11,7 +11,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE1-SNAPSHOT</version>
+   <version>5.4.0-PRE2-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@ -14,6 +14,7 @@ import org.hl7.fhir.r4.model.Bundle;
|
|||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
|
@ -93,6 +94,7 @@ public class DaoConfig {
|
|||
/**
|
||||
* update setter javadoc if default changes
|
||||
*/
|
||||
@Nonnull
|
||||
private Long myTranslationCachesExpireAfterWriteInMinutes = DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES;
|
||||
/**
|
||||
* update setter javadoc if default changes
|
||||
|
@ -884,6 +886,7 @@ public class DaoConfig {
|
|||
* Specifies the duration in minutes for which values will be retained after being
|
||||
* written to the terminology translation cache. Defaults to 60.
|
||||
*/
|
||||
@Nonnull
|
||||
public Long getTranslationCachesExpireAfterWriteInMinutes() {
|
||||
return myTranslationCachesExpireAfterWriteInMinutes;
|
||||
}
|
||||
|
|
|
@ -21,10 +21,10 @@ package ca.uhn.fhir.jpa.api.dao;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationResult;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
|
||||
public interface IFhirResourceDaoConceptMap<T extends IBaseResource> extends IFhirResourceDao<T> {
|
||||
TranslationResult translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails);
|
||||
TranslateConceptResults translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails);
|
||||
}
|
||||
|
|
|
@ -1,74 +0,0 @@
|
|||
package ca.uhn.fhir.jpa.api.model;
|
||||
|
||||
/*
|
||||
* #%L
|
||||
* HAPI FHIR JPA API
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import org.hl7.fhir.r4.model.CodeType;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent;
|
||||
import org.hl7.fhir.r4.model.UriType;
|
||||
|
||||
public class TranslationMatch {
|
||||
private Coding myConcept;
|
||||
private CodeType myEquivalence;
|
||||
private UriType mySource;
|
||||
|
||||
public TranslationMatch() {
|
||||
super();
|
||||
}
|
||||
|
||||
public Coding getConcept() {
|
||||
return myConcept;
|
||||
}
|
||||
|
||||
public void setConcept(Coding theConcept) {
|
||||
myConcept = theConcept;
|
||||
}
|
||||
|
||||
public CodeType getEquivalence() {
|
||||
return myEquivalence;
|
||||
}
|
||||
|
||||
public void setEquivalence(CodeType theEquivalence) {
|
||||
myEquivalence = theEquivalence;
|
||||
}
|
||||
|
||||
public UriType getSource() {
|
||||
return mySource;
|
||||
}
|
||||
|
||||
public void setSource(UriType theSource) {
|
||||
mySource = theSource;
|
||||
}
|
||||
|
||||
public void toParameterParts(ParametersParameterComponent theParam) {
|
||||
if (myEquivalence != null) {
|
||||
theParam.addPart().setName("equivalence").setValue(myEquivalence);
|
||||
}
|
||||
|
||||
if (myConcept != null) {
|
||||
theParam.addPart().setName("concept").setValue(myConcept);
|
||||
}
|
||||
|
||||
if (mySource != null) {
|
||||
theParam.addPart().setName("source").setValue(mySource);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,88 +0,0 @@
|
|||
package ca.uhn.fhir.jpa.api.model;
|
||||
|
||||
/*
|
||||
* #%L
|
||||
* HAPI FHIR JPA API
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import org.hl7.fhir.r4.model.BooleanType;
|
||||
import org.hl7.fhir.r4.model.Parameters;
|
||||
import org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent;
|
||||
import org.hl7.fhir.r4.model.StringType;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class TranslationResult {
|
||||
private List<TranslationMatch> myMatches;
|
||||
private StringType myMessage;
|
||||
private BooleanType myResult;
|
||||
|
||||
public TranslationResult() {
|
||||
super();
|
||||
|
||||
myMatches = new ArrayList<>();
|
||||
}
|
||||
|
||||
public List<TranslationMatch> getMatches() {
|
||||
return myMatches;
|
||||
}
|
||||
|
||||
public void setMatches(List<TranslationMatch> theMatches) {
|
||||
myMatches = theMatches;
|
||||
}
|
||||
|
||||
public boolean addMatch(TranslationMatch theMatch) {
|
||||
return myMatches.add(theMatch);
|
||||
}
|
||||
|
||||
public StringType getMessage() {
|
||||
return myMessage;
|
||||
}
|
||||
|
||||
public void setMessage(StringType theMessage) {
|
||||
myMessage = theMessage;
|
||||
}
|
||||
|
||||
public BooleanType getResult() {
|
||||
return myResult;
|
||||
}
|
||||
|
||||
public void setResult(BooleanType theMatched) {
|
||||
myResult = theMatched;
|
||||
}
|
||||
|
||||
public Parameters toParameters() {
|
||||
Parameters retVal = new Parameters();
|
||||
|
||||
if (myResult != null) {
|
||||
retVal.addParameter().setName("result").setValue(myResult);
|
||||
}
|
||||
|
||||
if (myMessage != null) {
|
||||
retVal.addParameter().setName("message").setValue(myMessage);
|
||||
}
|
||||
|
||||
for (TranslationMatch translationMatch : myMatches) {
|
||||
ParametersParameterComponent matchParam = retVal.addParameter().setName("match");
|
||||
translationMatch.toParameterParts(matchParam);
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
}
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE1-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE2-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.batch;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor;
|
||||
import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
|
||||
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
|
@ -34,4 +35,10 @@ public class CommonBatchJobConfig {
|
|||
return new PidToIBaseResourceProcessor();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@StepScope
|
||||
public GoldenResourceAnnotatingProcessor goldenResourceAnnotatingProcessor() {
|
||||
return new GoldenResourceAnnotatingProcessor();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,147 @@
|
|||
package ca.uhn.fhir.jpa.batch.processors;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.fhirpath.IFhirPath;
|
||||
import ca.uhn.fhir.jpa.batch.log.Logs;
|
||||
import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
|
||||
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
|
||||
import ca.uhn.fhir.util.ExtensionUtil;
|
||||
import ca.uhn.fhir.util.HapiExtensions;
|
||||
import ca.uhn.fhir.util.SearchParameterUtil;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.hl7.fhir.instance.model.api.IBaseExtension;
|
||||
import org.hl7.fhir.instance.model.api.IBaseReference;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.slf4j.Logger;
|
||||
import org.springframework.batch.item.ItemProcessor;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
|
||||
* Reusable Item Processor which attaches an extension to any outgoing resource. This extension will contain a resource
|
||||
* reference to the golden resource of the given resource's patient (e.g. Observation.subject, Immunization.patient, etc.)
|
||||
*/
|
||||
public class GoldenResourceAnnotatingProcessor implements ItemProcessor<List<IBaseResource>, List<IBaseResource>> {
|
||||
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
|
||||
|
||||
@Value("#{stepExecutionContext['resourceType']}")
|
||||
private String myResourceType;
|
||||
|
||||
@Autowired
|
||||
private FhirContext myContext;
|
||||
|
||||
@Autowired
|
||||
private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
|
||||
|
||||
@Value("#{jobParameters['" + BulkExportJobConfig.EXPAND_MDM_PARAMETER+ "'] ?: false}")
|
||||
private boolean myMdmEnabled;
|
||||
|
||||
|
||||
private RuntimeSearchParam myRuntimeSearchParam;
|
||||
|
||||
private String myPatientFhirPath;
|
||||
|
||||
private IFhirPath myFhirPath;
|
||||
|
||||
private void populateRuntimeSearchParam() {
|
||||
Optional<RuntimeSearchParam> oPatientSearchParam= SearchParameterUtil.getOnlyPatientSearchParamForResourceType(myContext, myResourceType);
|
||||
if (!oPatientSearchParam.isPresent()) {
|
||||
String errorMessage = String.format("[%s] has no search parameters that are for patients, so it is invalid for Group Bulk Export!", myResourceType);
|
||||
throw new IllegalArgumentException(errorMessage);
|
||||
} else {
|
||||
myRuntimeSearchParam = oPatientSearchParam.get();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<IBaseResource> process(List<IBaseResource> theIBaseResources) throws Exception {
|
||||
//If MDM expansion is enabled, add this magic new extension, otherwise, return the resource as is.
|
||||
if (myMdmEnabled) {
|
||||
if (myRuntimeSearchParam == null) {
|
||||
populateRuntimeSearchParam();
|
||||
}
|
||||
if (myPatientFhirPath == null) {
|
||||
populatePatientFhirPath();
|
||||
}
|
||||
theIBaseResources.forEach(this::annotateClinicalResourceWithRelatedGoldenResourcePatient);
|
||||
}
|
||||
return theIBaseResources;
|
||||
}
|
||||
|
||||
private void annotateClinicalResourceWithRelatedGoldenResourcePatient(IBaseResource iBaseResource) {
|
||||
Optional<String> patientReference = getPatientReference(iBaseResource);
|
||||
if (patientReference.isPresent()) {
|
||||
addGoldenResourceExtension(iBaseResource, patientReference.get());
|
||||
} else {
|
||||
ourLog.error("Failed to find the patient reference information for resource {}. This is a bug, " +
|
||||
"as all resources which can be exported via Group Bulk Export must reference a patient.", iBaseResource);
|
||||
}
|
||||
}
|
||||
|
||||
private Optional<String> getPatientReference(IBaseResource iBaseResource) {
|
||||
//In the case of patient, we will just use the raw ID.
|
||||
if (myResourceType.equalsIgnoreCase("Patient")) {
|
||||
return Optional.of(iBaseResource.getIdElement().getIdPart());
|
||||
//Otherwise, we will perform evaluation of the fhirPath.
|
||||
} else {
|
||||
Optional<IBaseReference> optionalReference = getFhirParser().evaluateFirst(iBaseResource, myPatientFhirPath, IBaseReference.class);
|
||||
return optionalReference.map(theIBaseReference -> theIBaseReference.getReferenceElement().getIdPart());
|
||||
}
|
||||
}
|
||||
|
||||
private void addGoldenResourceExtension(IBaseResource iBaseResource, String sourceResourceId) {
|
||||
String goldenResourceId = myMdmExpansionCacheSvc.getGoldenResourceId(sourceResourceId);
|
||||
IBaseExtension<?, ?> extension = ExtensionUtil.getOrCreateExtension(iBaseResource, HapiExtensions.ASSOCIATED_GOLDEN_RESOURCE_EXTENSION_URL);
|
||||
if (!StringUtils.isBlank(goldenResourceId)) {
|
||||
ExtensionUtil.setExtension(myContext, extension, "reference", prefixPatient(goldenResourceId));
|
||||
}
|
||||
}
|
||||
|
||||
private String prefixPatient(String theResourceId) {
|
||||
return "Patient/" + theResourceId;
|
||||
}
|
||||
|
||||
private IFhirPath getFhirParser() {
|
||||
if (myFhirPath == null) {
|
||||
myFhirPath = myContext.newFhirPath();
|
||||
}
|
||||
return myFhirPath;
|
||||
}
|
||||
|
||||
private String populatePatientFhirPath() {
|
||||
if (myPatientFhirPath == null) {
|
||||
myPatientFhirPath = myRuntimeSearchParam.getPath();
|
||||
// GGG: Yes this is a stupid hack, but by default this runtime search param will return paths like
// Observation.subject.where(resolve() is Patient), which unfortunately doesn't play nicely with
// our FHIRPath evaluator, so we strip the .where() clause.
|
||||
if (myPatientFhirPath.contains(".where")) {
|
||||
myPatientFhirPath = myPatientFhirPath.substring(0, myPatientFhirPath.indexOf(".where"));
|
||||
}
|
||||
}
|
||||
return myPatientFhirPath;
|
||||
}
|
||||
}
|
|
@ -50,6 +50,7 @@ public class PidToIBaseResourceProcessor implements ItemProcessor<List<ResourceP
|
|||
@Autowired
|
||||
private DaoRegistry myDaoRegistry;
|
||||
|
||||
|
||||
@Value("#{stepExecutionContext['resourceType']}")
|
||||
private String myResourceType;
|
||||
|
||||
|
|
|
@ -36,6 +36,7 @@ import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
|||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.param.DateRangeParam;
|
||||
import ca.uhn.fhir.util.SearchParameterUtil;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -179,47 +180,15 @@ public abstract class BaseBulkItemReader implements ItemReader<List<ResourcePers
|
|||
|
||||
}
|
||||
|
||||
/**
|
||||
* Given the resource type, fetch its patient-based search parameter name
|
||||
* 1. Attempt to find one called 'patient'
|
||||
* 2. If that fails, find one called 'subject'
|
||||
* 3. If that fails, find by Patient Compartment.
|
||||
* 3.1 If that returns >1 result, throw an error
|
||||
* 3.2 If that returns 1 result, return it
|
||||
*/
|
||||
protected RuntimeSearchParam getPatientSearchParamForCurrentResourceType() {
|
||||
if (myPatientSearchParam == null) {
|
||||
RuntimeResourceDefinition runtimeResourceDefinition = myContext.getResourceDefinition(myResourceType);
|
||||
myPatientSearchParam = runtimeResourceDefinition.getSearchParam("patient");
|
||||
if (myPatientSearchParam == null) {
|
||||
myPatientSearchParam = runtimeResourceDefinition.getSearchParam("subject");
|
||||
if (myPatientSearchParam == null) {
|
||||
myPatientSearchParam = getRuntimeSearchParamByCompartment(runtimeResourceDefinition);
|
||||
if (myPatientSearchParam == null) {
|
||||
String errorMessage = String.format("[%s] has no search parameters that are for patients, so it is invalid for Group Bulk Export!", myResourceType);
|
||||
throw new IllegalArgumentException(errorMessage);
|
||||
}
|
||||
}
|
||||
Optional<RuntimeSearchParam> onlyPatientSearchParamForResourceType = SearchParameterUtil.getOnlyPatientSearchParamForResourceType(myContext, myResourceType);
|
||||
if (onlyPatientSearchParamForResourceType.isPresent()) {
|
||||
myPatientSearchParam = onlyPatientSearchParamForResourceType.get();
|
||||
} else {
|
||||
|
||||
}
|
||||
}
|
||||
return myPatientSearchParam;
|
||||
}
|
||||
|
||||
/**
|
||||
* Search the resource definition for a compartment named 'patient' and return its related Search Parameter.
|
||||
*/
|
||||
protected RuntimeSearchParam getRuntimeSearchParamByCompartment(RuntimeResourceDefinition runtimeResourceDefinition) {
|
||||
RuntimeSearchParam patientSearchParam;
|
||||
List<RuntimeSearchParam> searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient");
|
||||
if (searchParams == null || searchParams.size() == 0) {
|
||||
String errorMessage = String.format("Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", myResourceType);
|
||||
throw new IllegalArgumentException(errorMessage);
|
||||
} else if (searchParams.size() == 1) {
|
||||
patientSearchParam = searchParams.get(0);
|
||||
} else {
|
||||
String errorMessage = String.format("Resource type [%s] is not eligible for Group Bulk export, as we are unable to disambiguate which patient search parameter we should be searching by.", myResourceType);
|
||||
throw new IllegalArgumentException(errorMessage);
|
||||
}
|
||||
return patientSearchParam;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,8 +21,10 @@ package ca.uhn.fhir.jpa.bulk.job;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
|
||||
import ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor;
|
||||
import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
|
||||
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
|
||||
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.springframework.batch.core.Job;
|
||||
|
@ -32,13 +34,16 @@ import org.springframework.batch.core.configuration.annotation.JobBuilderFactory
|
|||
import org.springframework.batch.core.configuration.annotation.JobScope;
|
||||
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
|
||||
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||
import org.springframework.batch.item.ItemProcessor;
|
||||
import org.springframework.batch.item.ItemReader;
|
||||
import org.springframework.batch.item.ItemWriter;
|
||||
import org.springframework.batch.item.support.CompositeItemProcessor;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Lazy;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
|
@ -64,11 +69,23 @@ public class BulkExportJobConfig {
|
|||
@Autowired
|
||||
private PidToIBaseResourceProcessor myPidToIBaseResourceProcessor;
|
||||
|
||||
@Autowired
|
||||
private GoldenResourceAnnotatingProcessor myGoldenResourceAnnotatingProcessor;
|
||||
|
||||
@Bean
|
||||
public BulkExportDaoSvc bulkExportDaoSvc() {
|
||||
return new BulkExportDaoSvc();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
@Lazy
|
||||
@JobScope
|
||||
public MdmExpansionCacheSvc mdmExpansionCacheSvc() {
|
||||
return new MdmExpansionCacheSvc();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
@Lazy
|
||||
public Job bulkExportJob() {
|
||||
|
@ -80,6 +97,18 @@ public class BulkExportJobConfig {
|
|||
.build();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Lazy
|
||||
@StepScope
|
||||
public CompositeItemProcessor<List<ResourcePersistentId>, List<IBaseResource>> inflateResourceThenAnnotateWithGoldenResourceProcessor() {
|
||||
CompositeItemProcessor processor = new CompositeItemProcessor<>();
|
||||
ArrayList<ItemProcessor> delegates = new ArrayList<>();
|
||||
delegates.add(myPidToIBaseResourceProcessor);
|
||||
delegates.add(myGoldenResourceAnnotatingProcessor);
|
||||
processor.setDelegates(delegates);
|
||||
return processor;
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Lazy
|
||||
public Job groupBulkExportJob() {
|
||||
|
@ -132,7 +161,7 @@ public class BulkExportJobConfig {
|
|||
return myStepBuilderFactory.get("groupBulkExportGenerateResourceFilesStep")
|
||||
.<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
|
||||
.reader(groupBulkItemReader())
|
||||
.processor(myPidToIBaseResourceProcessor)
|
||||
.processor(inflateResourceThenAnnotateWithGoldenResourceProcessor())
|
||||
.writer(resourceToFileWriter())
|
||||
.listener(bulkExportGenerateResourceFilesStepListener())
|
||||
.build();
|
||||
|
|
|
@ -27,7 +27,7 @@ import ca.uhn.fhir.jpa.dao.IResultIterator;
|
|||
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
|
||||
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.entity.Search;
|
||||
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
|
||||
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.util.QueryChunker;
|
||||
|
@ -36,6 +36,7 @@ import ca.uhn.fhir.model.primitive.IdDt;
|
|||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
|
||||
import ca.uhn.fhir.rest.param.ReferenceParam;
|
||||
import com.google.common.collect.Multimaps;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
|
@ -45,6 +46,7 @@ import org.springframework.beans.factory.annotation.Autowired;
|
|||
import org.springframework.beans.factory.annotation.Value;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
@ -75,6 +77,8 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
|
|||
private IdHelperService myIdHelperService;
|
||||
@Autowired
|
||||
private IMdmLinkDao myMdmLinkDao;
|
||||
@Autowired
|
||||
private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
|
||||
|
||||
@Override
|
||||
Iterator<ResourcePersistentId> getResourcePidIterator() {
|
||||
|
@ -109,17 +113,20 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
|
|||
* possibly expanded by MDM, and don't have to go and fetch other resource DAOs.
|
||||
*/
|
||||
private Iterator<ResourcePersistentId> getExpandedPatientIterator() {
|
||||
Set<Long> patientPidsToExport = new HashSet<>();
|
||||
List<String> members = getMembers();
|
||||
List<IIdType> ids = members.stream().map(member -> new IdDt("Patient/" + member)).collect(Collectors.toList());
|
||||
List<Long> pidsOrThrowException = myIdHelperService.getPidsOrThrowException(ids);
|
||||
patientPidsToExport.addAll(pidsOrThrowException);
|
||||
Set<Long> patientPidsToExport = new HashSet<>(pidsOrThrowException);
|
||||
|
||||
if (myMdmEnabled) {
|
||||
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId));
|
||||
Long pidOrNull = myIdHelperService.getPidOrNull(group);
|
||||
List<List<Long>> lists = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
|
||||
lists.forEach(patientPidsToExport::addAll);
|
||||
List<IMdmLinkDao.MdmPidTuple> goldenPidSourcePidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
|
||||
goldenPidSourcePidTuple.forEach(tuple -> {
|
||||
patientPidsToExport.add(tuple.getGoldenPid());
|
||||
patientPidsToExport.add(tuple.getSourcePid());
|
||||
});
|
||||
populateMdmResourceCache(goldenPidSourcePidTuple);
|
||||
}
|
||||
List<ResourcePersistentId> resourcePersistentIds = patientPidsToExport
|
||||
.stream()
|
||||
|
@ -128,6 +135,45 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
|
|||
return resourcePersistentIds.iterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param thePidTuples the golden resource PID / source resource PID pairs used to build the MDM expansion cache
|
||||
*/
|
||||
private void populateMdmResourceCache(List<IMdmLinkDao.MdmPidTuple> thePidTuples) {
|
||||
if (myMdmExpansionCacheSvc.hasBeenPopulated()) {
|
||||
return;
|
||||
}
|
||||
//First, convert this zipped set of tuples to a map of
|
||||
//{
|
||||
// patient/gold-1 -> [patient/1, patient/2]
|
||||
// patient/gold-2 -> [patient/3, patient/4]
|
||||
//}
|
||||
Map<Long, Set<Long>> goldenResourceToSourcePidMap = new HashMap<>();
|
||||
extract(thePidTuples, goldenResourceToSourcePidMap);
|
||||
|
||||
//Next, lets convert it to an inverted index for fast lookup
|
||||
// {
|
||||
// patient/1 -> patient/gold-1
|
||||
// patient/2 -> patient/gold-1
|
||||
// patient/3 -> patient/gold-2
|
||||
// patient/4 -> patient/gold-2
|
||||
// }
|
||||
Map<String, String> sourceResourceIdToGoldenResourceIdMap = new HashMap<>();
|
||||
goldenResourceToSourcePidMap.forEach((key, value) -> {
|
||||
String goldenResourceId = myIdHelperService.translatePidIdToForcedId(new ResourcePersistentId(key)).orElse(key.toString());
|
||||
Map<Long, Optional<String>> pidsToForcedIds = myIdHelperService.translatePidsToForcedIds(value);
|
||||
|
||||
Set<String> sourceResourceIds = pidsToForcedIds.entrySet().stream()
|
||||
.map(ent -> ent.getValue().isPresent() ? ent.getValue().get() : ent.getKey().toString())
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
sourceResourceIds
|
||||
.forEach(sourceResourceId -> sourceResourceIdToGoldenResourceIdMap.put(sourceResourceId, goldenResourceId));
|
||||
});
|
||||
|
||||
//Now that we have built our cached expansion, store it.
|
||||
myMdmExpansionCacheSvc.setCacheContents(sourceResourceIdToGoldenResourceIdMap);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given the local myGroupId, read this group, and find all members' patient references.
|
||||
* @return A list of strings representing the Patient IDs of the members (e.g. ["P1", "P2", "P3"]
|
||||
|
@ -154,13 +200,19 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
|
|||
|
||||
//Attempt to perform MDM Expansion of membership
|
||||
if (myMdmEnabled) {
|
||||
List<List<Long>> goldenPidTargetPidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
|
||||
List<IMdmLinkDao.MdmPidTuple> goldenPidTargetPidTuples = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
|
||||
//Now lets translate these pids into resource IDs
|
||||
Set<Long> uniquePids = new HashSet<>();
|
||||
goldenPidTargetPidTuple.forEach(uniquePids::addAll);
|
||||
|
||||
goldenPidTargetPidTuples.forEach(tuple -> {
|
||||
uniquePids.add(tuple.getGoldenPid());
|
||||
uniquePids.add(tuple.getSourcePid());
|
||||
});
|
||||
Map<Long, Optional<String>> pidToForcedIdMap = myIdHelperService.translatePidsToForcedIds(uniquePids);
|
||||
|
||||
Map<Long, Set<Long>> goldenResourceToSourcePidMap = new HashMap<>();
|
||||
extract(goldenPidTargetPidTuples, goldenResourceToSourcePidMap);
|
||||
populateMdmResourceCache(goldenPidTargetPidTuples);
|
||||
|
||||
//If the result of the translation is an empty optional, it means there is no forced id, and we can use the PID as the resource ID.
|
||||
Set<String> resolvedResourceIds = pidToForcedIdMap.entrySet().stream()
|
||||
.map(entry -> entry.getValue().isPresent() ? entry.getValue().get() : entry.getKey().toString())
|
||||
|
@ -176,6 +228,14 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
|
|||
return expandedIds;
|
||||
}
|
||||
|
||||
private void extract(List<IMdmLinkDao.MdmPidTuple> theGoldenPidTargetPidTuples, Map<Long, Set<Long>> theGoldenResourceToSourcePidMap) {
|
||||
for (IMdmLinkDao.MdmPidTuple goldenPidTargetPidTuple : theGoldenPidTargetPidTuples) {
|
||||
Long goldenPid = goldenPidTargetPidTuple.getGoldenPid();
|
||||
Long sourcePid = goldenPidTargetPidTuple.getSourcePid();
|
||||
theGoldenResourceToSourcePidMap.computeIfAbsent(goldenPid, key -> new HashSet<>()).add(sourcePid);
|
||||
}
|
||||
}
|
||||
|
||||
private void queryResourceTypeWithReferencesToPatients(Set<ResourcePersistentId> myReadPids, List<String> idChunk) {
|
||||
//Build SP map
|
||||
//First, inject the _typeFilters and _since from the export job
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package ca.uhn.fhir.jpa.config;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.i18n.HapiLocalizer;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorService;
|
||||
|
@ -62,6 +63,7 @@ import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
|
|||
import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
|
||||
import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptorInterceptor;
|
||||
import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor;
|
||||
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
|
||||
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
|
||||
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
|
||||
import ca.uhn.fhir.jpa.packages.IHapiPackageCacheManager;
|
||||
|
@ -121,6 +123,8 @@ import ca.uhn.fhir.jpa.searchparam.extractor.IResourceLinkResolver;
|
|||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamProvider;
|
||||
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
|
||||
import ca.uhn.fhir.jpa.sp.SearchParamPresenceSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
||||
import ca.uhn.fhir.jpa.util.MemoryCacheService;
|
||||
import ca.uhn.fhir.jpa.validation.JpaResourceLoader;
|
||||
import ca.uhn.fhir.jpa.validation.ValidationSettings;
|
||||
|
@ -253,6 +257,13 @@ public abstract class BaseConfig {
|
|||
return new CascadingDeleteInterceptor(theFhirContext, theDaoRegistry, theInterceptorBroadcaster);
|
||||
}
|
||||
|
||||
|
||||
@Lazy
|
||||
@Bean
|
||||
public ResponseTerminologyTranslationInterceptor responseTerminologyTranslationInterceptor(IValidationSupport theValidationSupport) {
|
||||
return new ResponseTerminologyTranslationInterceptor(theValidationSupport);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method should be overridden to provide an actual completed
|
||||
* bean, but it provides a partially completed entity manager
|
||||
|
@ -338,6 +349,11 @@ public abstract class BaseConfig {
|
|||
return new DatabaseSearchResultCacheSvcImpl();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ITermConceptMappingSvc termConceptMappingSvc() {
|
||||
return new TermConceptMappingSvcImpl();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ThreadPoolTaskExecutor searchCoordinatorThreadFactory() {
|
||||
final ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
|
||||
|
|
|
@ -82,7 +82,12 @@ public abstract class BaseConfigDstu3Plus extends BaseConfig {
|
|||
@Primary
|
||||
@Bean
|
||||
public IValidationSupport validationSupportChain() {
|
||||
return new CachingValidationSupport(jpaValidationSupportChain());
|
||||
|
||||
// Short timeout for code translation because TermConceptMappingSvcImpl has its own caching
|
||||
CachingValidationSupport.CacheTimeouts cacheTimeouts = CachingValidationSupport.CacheTimeouts.defaultValues()
|
||||
.setTranslateCodeMillis(1000);
|
||||
|
||||
return new CachingValidationSupport(jpaValidationSupportChain(), cacheTimeouts);
|
||||
}
|
||||
|
||||
@Bean(name = "myInstanceValidator")
|
||||
|
|
|
@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.dao.data;
|
|||
|
||||
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
|
||||
import ca.uhn.fhir.jpa.entity.MdmLink;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Modifying;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
|
@ -33,14 +34,14 @@ import java.util.List;
|
|||
@Repository
|
||||
public interface IMdmLinkDao extends JpaRepository<MdmLink, Long> {
|
||||
@Modifying
|
||||
@Query("DELETE FROM MdmLink f WHERE f.myGoldenResourcePid = :pid OR f.mySourcePid = :pid")
|
||||
@Query("DELETE FROM MdmLink f WHERE myGoldenResourcePid = :pid OR mySourcePid = :pid")
|
||||
int deleteWithAnyReferenceToPid(@Param("pid") Long thePid);
|
||||
|
||||
@Modifying
|
||||
@Query("DELETE FROM MdmLink f WHERE (f.myGoldenResourcePid = :pid OR f.mySourcePid = :pid) AND f.myMatchResult <> :matchResult")
|
||||
@Query("DELETE FROM MdmLink f WHERE (myGoldenResourcePid = :pid OR mySourcePid = :pid) AND myMatchResult <> :matchResult")
|
||||
int deleteWithAnyReferenceToPidAndMatchResultNot(@Param("pid") Long thePid, @Param("matchResult") MdmMatchResultEnum theMatchResult);
|
||||
|
||||
@Query("SELECT ml2.myGoldenResourcePid, ml2.mySourcePid FROM MdmLink ml2 " +
|
||||
@Query("SELECT ml2.myGoldenResourcePid as goldenPid, ml2.mySourcePid as sourcePid FROM MdmLink ml2 " +
|
||||
"WHERE ml2.myMatchResult=:matchResult " +
|
||||
"AND ml2.myGoldenResourcePid IN (" +
|
||||
"SELECT ml.myGoldenResourcePid FROM MdmLink ml " +
|
||||
|
@ -50,15 +51,11 @@ public interface IMdmLinkDao extends JpaRepository<MdmLink, Long> {
|
|||
"AND hrl.mySourcePath='Group.member.entity' " +
|
||||
"AND hrl.myTargetResourceType='Patient'" +
|
||||
")")
|
||||
List<List<Long>> expandPidsFromGroupPidGivenMatchResult(@Param("groupPid") Long theGroupPid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);
|
||||
List<MdmPidTuple> expandPidsFromGroupPidGivenMatchResult(@Param("groupPid") Long theGroupPid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);
|
||||
|
||||
@Query("SELECT ml.myGoldenResourcePid, ml.mySourcePid " +
|
||||
"FROM MdmLink ml " +
|
||||
"INNER JOIN MdmLink ml2 " +
|
||||
"on ml.myGoldenResourcePid=ml2.myGoldenResourcePid " +
|
||||
"WHERE ml2.mySourcePid=:sourcePid " +
|
||||
"AND ml2.myMatchResult=:matchResult " +
|
||||
"AND ml.myMatchResult=:matchResult")
|
||||
List<List<Long>> expandPidsBySourcePidAndMatchResult(@Param("sourcePid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum);
|
||||
interface MdmPidTuple {
|
||||
Long getGoldenPid();
|
||||
Long getSourcePid();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -21,147 +21,38 @@ package ca.uhn.fhir.jpa.dao.dstu3;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationMatch;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationResult;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
|
||||
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import org.hl7.fhir.dstu3.model.ConceptMap;
|
||||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.model.BooleanType;
|
||||
import org.hl7.fhir.r4.model.CodeType;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.StringType;
|
||||
import org.hl7.fhir.r4.model.UriType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.hl7.fhir.convertors.conv30_40.ConceptMap30_40.convertConceptMap;
|
||||
|
||||
public class FhirResourceDaoConceptMapDstu3 extends BaseHapiFhirResourceDao<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
|
||||
@Autowired
|
||||
private ITermReadSvc myHapiTerminologySvc;
|
||||
private ITermConceptMappingSvc myTermConceptMappingSvc;
|
||||
|
||||
@Override
|
||||
public TranslationResult translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
|
||||
public TranslateConceptResults translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
|
||||
if (theTranslationRequest.hasReverse() && theTranslationRequest.getReverseAsBoolean()) {
|
||||
return buildReverseTranslationResult(myHapiTerminologySvc.translateWithReverse(theTranslationRequest));
|
||||
return myTermConceptMappingSvc.translateWithReverse(theTranslationRequest);
|
||||
}
|
||||
|
||||
return buildTranslationResult(myHapiTerminologySvc.translate(theTranslationRequest));
|
||||
return myTermConceptMappingSvc.translate(theTranslationRequest);
|
||||
}
|
||||
|
||||
private TranslationResult buildTranslationResult(List<TermConceptMapGroupElementTarget> theTargets) {
|
||||
TranslationResult retVal = new TranslationResult();
|
||||
|
||||
String msg;
|
||||
if (theTargets.isEmpty()) {
|
||||
|
||||
retVal.setResult(new BooleanType(false));
|
||||
|
||||
msg = getContext().getLocalizer().getMessage(
|
||||
FhirResourceDaoConceptMapDstu3.class,
|
||||
"noMatchesFound");
|
||||
|
||||
retVal.setMessage(new StringType(msg));
|
||||
|
||||
} else {
|
||||
|
||||
retVal.setResult(new BooleanType(true));
|
||||
|
||||
msg = getContext().getLocalizer().getMessage(
|
||||
FhirResourceDaoConceptMapDstu3.class,
|
||||
"matchesFound");
|
||||
|
||||
retVal.setMessage(new StringType(msg));
|
||||
|
||||
TranslationMatch translationMatch;
|
||||
Set<TermConceptMapGroupElementTarget> targetsToReturn = new HashSet<>();
|
||||
for (TermConceptMapGroupElementTarget target : theTargets) {
|
||||
if (targetsToReturn.add(target)) {
|
||||
translationMatch = new TranslationMatch();
|
||||
|
||||
if (target.getEquivalence() != null) {
|
||||
translationMatch.setEquivalence(new CodeType(target.getEquivalence().toCode()));
|
||||
}
|
||||
|
||||
translationMatch.setConcept(
|
||||
new Coding()
|
||||
.setCode(target.getCode())
|
||||
.setSystem(target.getSystem())
|
||||
.setVersion(target.getSystemVersion())
|
||||
.setDisplay(target.getDisplay())
|
||||
);
|
||||
|
||||
translationMatch.setSource(new UriType(target.getConceptMapUrl()));
|
||||
|
||||
retVal.addMatch(translationMatch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private TranslationResult buildReverseTranslationResult(List<TermConceptMapGroupElement> theElements) {
|
||||
TranslationResult retVal = new TranslationResult();
|
||||
|
||||
String msg;
|
||||
if (theElements.isEmpty()) {
|
||||
|
||||
retVal.setResult(new BooleanType(false));
|
||||
|
||||
msg = getContext().getLocalizer().getMessage(
|
||||
FhirResourceDaoConceptMapDstu3.class,
|
||||
"noMatchesFound");
|
||||
|
||||
retVal.setMessage(new StringType(msg));
|
||||
|
||||
} else {
|
||||
|
||||
retVal.setResult(new BooleanType(true));
|
||||
|
||||
msg = getContext().getLocalizer().getMessage(
|
||||
FhirResourceDaoConceptMapDstu3.class,
|
||||
"matchesFound");
|
||||
|
||||
retVal.setMessage(new StringType(msg));
|
||||
|
||||
TranslationMatch translationMatch;
|
||||
Set<TermConceptMapGroupElement> elementsToReturn = new HashSet<>();
|
||||
for (TermConceptMapGroupElement element : theElements) {
|
||||
if (elementsToReturn.add(element)) {
|
||||
translationMatch = new TranslationMatch();
|
||||
|
||||
translationMatch.setConcept(
|
||||
new Coding()
|
||||
.setCode(element.getCode())
|
||||
.setSystem(element.getSystem())
|
||||
.setVersion(element.getSystemVersion())
|
||||
.setDisplay(element.getDisplay())
|
||||
);
|
||||
|
||||
translationMatch.setSource(new UriType(element.getConceptMapUrl()));
|
||||
|
||||
retVal.addMatch(translationMatch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ResourceTable updateEntity(RequestDetails theRequestDetails, IBaseResource theResource, IBasePersistedResource theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
|
||||
|
@ -173,12 +64,12 @@ public class FhirResourceDaoConceptMapDstu3 extends BaseHapiFhirResourceDao<Conc
|
|||
try {
|
||||
ConceptMap conceptMap = (ConceptMap) theResource;
|
||||
org.hl7.fhir.r4.model.ConceptMap converted = convertConceptMap(conceptMap);
|
||||
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, converted);
|
||||
myTermConceptMappingSvc.storeTermConceptMapAndChildren(retVal, converted);
|
||||
} catch (FHIRException fe) {
|
||||
throw new InternalErrorException(fe);
|
||||
}
|
||||
} else {
|
||||
myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
|
||||
myTermConceptMappingSvc.deleteConceptMapAndChildren(retVal);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,87 @@
|
|||
package ca.uhn.fhir.jpa.dao.mdm;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.slf4j.Logger;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
import static org.slf4j.LoggerFactory.getLogger;
|
||||
|
||||
/**
|
||||
* The purpose of this class is to share context between steps of a given GroupBulkExport job.
|
||||
*
|
||||
* This cache allows you to port state between reader/processor/writer. In this case, we are maintaining
|
||||
* a cache of Source Resource ID -> Golden Resource ID, so that we can annotate outgoing resources with their golden owner
|
||||
* if applicable.
|
||||
*
|
||||
*/
|
||||
public class MdmExpansionCacheSvc {
|
||||
private static final Logger ourLog = getLogger(MdmExpansionCacheSvc.class);
|
||||
|
||||
private final ConcurrentHashMap<String, String> mySourceToGoldenIdCache = new ConcurrentHashMap<>();
|
||||
|
||||
/**
|
||||
* Lookup a given resource's golden resource ID in the cache. Note that if you pass this function the resource ID of a
|
||||
* golden resource, it will just return itself.
|
||||
*
|
||||
* @param theSourceId the resource ID of the source resource, e.g. PAT123
|
||||
* @return the resource ID of the associated golden resource.
|
||||
*/
|
||||
public String getGoldenResourceId(String theSourceId) {
|
||||
ourLog.debug(buildLogMessage("About to lookup cached resource ID " + theSourceId));
|
||||
String goldenResourceId = mySourceToGoldenIdCache.get(theSourceId);
|
||||
|
||||
// A golden resource's golden resource ID is itself.
|
||||
if (StringUtils.isBlank(goldenResourceId)) {
|
||||
if (mySourceToGoldenIdCache.containsValue(theSourceId)) {
|
||||
goldenResourceId = theSourceId;
|
||||
}
|
||||
}
|
||||
return goldenResourceId;
|
||||
}
|
||||
|
||||
private String buildLogMessage(String theMessage) {
|
||||
return buildLogMessage(theMessage, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a log message, potentially enriched with the cache content.
|
||||
*
|
||||
* @param message The log message
|
||||
* @param theAddCacheContentContent If true, will annotate the log message with the current cache contents.
|
||||
* @return a built log message, which may include the cache content.
|
||||
*/
|
||||
public String buildLogMessage(String message, boolean theAddCacheContentContent) {
|
||||
StringBuilder builder = new StringBuilder();
|
||||
builder.append(message);
|
||||
if (ourLog.isDebugEnabled() || theAddCacheContentContent) {
|
||||
builder.append("\n")
|
||||
.append("Current cache content is:")
|
||||
.append("\n");
|
||||
mySourceToGoldenIdCache.entrySet().stream().forEach(entry -> builder.append(entry.getKey()).append(" -> ").append(entry.getValue()).append("\n"));
|
||||
return builder.toString();
|
||||
}
|
||||
return builder.toString();
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Populate the cache
|
||||
*
|
||||
* @param theSourceResourceIdToGoldenResourceIdMap the source ID -> golden ID map to populate the cache with.
|
||||
*/
|
||||
public void setCacheContents(Map<String, String> theSourceResourceIdToGoldenResourceIdMap) {
|
||||
if (mySourceToGoldenIdCache.isEmpty()) {
|
||||
this.mySourceToGoldenIdCache.putAll(theSourceResourceIdToGoldenResourceIdMap);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Since this cache is used at @JobScope, we can skip a whole whack of expansions happening by simply checking
|
||||
* if one of our child steps has populated the cache yet.
|
||||
*/
|
||||
public boolean hasBeenPopulated() {
|
||||
return !mySourceToGoldenIdCache.isEmpty();
|
||||
}
|
||||
}
|
|
@ -21,153 +21,31 @@ package ca.uhn.fhir.jpa.dao.r4;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationMatch;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationResult;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
|
||||
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
|
||||
import org.hl7.fhir.convertors.VersionConvertor_40_50;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.model.BooleanType;
|
||||
import org.hl7.fhir.r4.model.CodeType;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.StringType;
|
||||
import org.hl7.fhir.r4.model.UriType;
|
||||
import org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
public class FhirResourceDaoConceptMapR4 extends BaseHapiFhirResourceDao<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
|
||||
@Autowired
|
||||
private ITermReadSvc myHapiTerminologySvc;
|
||||
private ITermConceptMappingSvc myTermConceptMappingSvc;
|
||||
|
||||
@Override
|
||||
public TranslationResult translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
|
||||
public TranslateConceptResults translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
|
||||
if (theTranslationRequest.hasReverse() && theTranslationRequest.getReverseAsBoolean()) {
|
||||
return buildReverseTranslationResult(myHapiTerminologySvc.translateWithReverse(theTranslationRequest));
|
||||
return myTermConceptMappingSvc.translateWithReverse(theTranslationRequest);
|
||||
}
|
||||
|
||||
return buildTranslationResult(myHapiTerminologySvc.translate(theTranslationRequest));
|
||||
}
|
||||
|
||||
private TranslationResult buildTranslationResult(List<TermConceptMapGroupElementTarget> theTargets) {
|
||||
TranslationResult retVal = new TranslationResult();
|
||||
|
||||
String msg;
|
||||
if (theTargets.isEmpty()) {
|
||||
|
||||
retVal.setResult(new BooleanType(false));
|
||||
|
||||
msg = getContext().getLocalizer().getMessage(
|
||||
FhirResourceDaoConceptMapR4.class,
|
||||
"noMatchesFound");
|
||||
|
||||
retVal.setMessage(new StringType(msg));
|
||||
|
||||
} else {
|
||||
|
||||
retVal.setResult(new BooleanType(true));
|
||||
|
||||
msg = getContext().getLocalizer().getMessage(
|
||||
FhirResourceDaoConceptMapR4.class,
|
||||
"matchesFound");
|
||||
|
||||
retVal.setMessage(new StringType(msg));
|
||||
|
||||
TranslationMatch translationMatch;
|
||||
Set<TermConceptMapGroupElementTarget> targetsToReturn = new HashSet<>();
|
||||
for (TermConceptMapGroupElementTarget target : theTargets) {
|
||||
if (targetsToReturn.add(target)) {
|
||||
translationMatch = new TranslationMatch();
|
||||
|
||||
if (target.getEquivalence() != null) {
|
||||
translationMatch.setEquivalence(new CodeType(target.getEquivalence().toCode()));
|
||||
}
|
||||
|
||||
translationMatch.setConcept(
|
||||
new Coding()
|
||||
.setCode(target.getCode())
|
||||
.setSystem(target.getSystem())
|
||||
.setVersion(target.getSystemVersion())
|
||||
.setDisplay(target.getDisplay())
|
||||
);
|
||||
|
||||
translationMatch.setSource(new UriType(target.getConceptMapUrl()));
|
||||
|
||||
retVal.addMatch(translationMatch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private TranslationResult buildReverseTranslationResult(List<TermConceptMapGroupElement> theElements) {
|
||||
TranslationResult retVal = new TranslationResult();
|
||||
|
||||
String msg;
|
||||
if (theElements.isEmpty()) {
|
||||
|
||||
retVal.setResult(new BooleanType(false));
|
||||
|
||||
msg = getContext().getLocalizer().getMessage(
|
||||
FhirResourceDaoConceptMapR4.class,
|
||||
"noMatchesFound");
|
||||
|
||||
retVal.setMessage(new StringType(msg));
|
||||
|
||||
} else {
|
||||
|
||||
retVal.setResult(new BooleanType(true));
|
||||
|
||||
msg = getContext().getLocalizer().getMessage(
|
||||
FhirResourceDaoConceptMapR4.class,
|
||||
"matchesFound");
|
||||
|
||||
retVal.setMessage(new StringType(msg));
|
||||
|
||||
TranslationMatch translationMatch;
|
||||
Set<TermConceptMapGroupElement> elementsToReturn = new HashSet<>();
|
||||
for (TermConceptMapGroupElement element : theElements) {
|
||||
if (elementsToReturn.add(element)) {
|
||||
translationMatch = new TranslationMatch();
|
||||
|
||||
translationMatch.setConcept(
|
||||
new Coding()
|
||||
.setCode(element.getCode())
|
||||
.setSystem(element.getSystem())
|
||||
.setVersion(element.getSystemVersion())
|
||||
.setDisplay(element.getDisplay())
|
||||
);
|
||||
|
||||
translationMatch.setSource(new UriType(element.getConceptMapUrl()));
|
||||
|
||||
if (element.getConceptMapGroupElementTargets().size() == 1) {
|
||||
|
||||
ConceptMapEquivalence eq = element.getConceptMapGroupElementTargets().get(0).getEquivalence();
|
||||
if (eq != null) {
|
||||
translationMatch.setEquivalence(new CodeType(eq.toCode()));
|
||||
}
|
||||
}
|
||||
|
||||
retVal.addMatch(translationMatch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return retVal;
|
||||
return myTermConceptMappingSvc.translate(theTranslationRequest);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -178,9 +56,9 @@ public class FhirResourceDaoConceptMapR4 extends BaseHapiFhirResourceDao<Concept
|
|||
if (!retVal.isUnchangedInCurrentOperation()) {
|
||||
if (retVal.getDeleted() == null) {
|
||||
ConceptMap conceptMap = (ConceptMap) theResource;
|
||||
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, conceptMap);
|
||||
myTermConceptMappingSvc.storeTermConceptMapAndChildren(retVal, conceptMap);
|
||||
} else {
|
||||
myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
|
||||
myTermConceptMappingSvc.deleteConceptMapAndChildren(retVal);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,22 +1,6 @@
|
|||
package ca.uhn.fhir.jpa.dao.r5;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
import java.util.Set;
import org.hl7.fhir.convertors.VersionConvertor_40_50;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence;
import org.hl7.fhir.r5.model.BooleanType;
import org.hl7.fhir.r5.model.CodeType;
import org.hl7.fhir.r5.model.Coding;
import org.hl7.fhir.r5.model.ConceptMap;
import org.hl7.fhir.r5.model.StringType;
import org.hl7.fhir.r5.model.UriType;
import org.springframework.beans.factory.annotation.Autowired;

/*
/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%

@ -37,137 +21,31 @@ import org.springframework.beans.factory.annotation.Autowired;
 */

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.api.model.TranslationMatch;
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
import ca.uhn.fhir.jpa.api.model.TranslationResult;
import ca.uhn.fhir.context.support.TranslateConceptResults;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.ConceptMap;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.Date;

public class FhirResourceDaoConceptMapR5 extends BaseHapiFhirResourceDao<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
   @Autowired
   private ITermReadSvc myHapiTerminologySvc;
   private ITermConceptMappingSvc myTermConceptMappingSvc;

   @Override
   public TranslationResult translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
   public TranslateConceptResults translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
      if (theTranslationRequest.hasReverse() && theTranslationRequest.getReverseAsBoolean()) {
         return buildReverseTranslationResult(myHapiTerminologySvc.translateWithReverse(theTranslationRequest));
         return myTermConceptMappingSvc.translateWithReverse(theTranslationRequest);
      }

      return buildTranslationResult(myHapiTerminologySvc.translate(theTranslationRequest));
   }

   private TranslationResult buildTranslationResult(List<TermConceptMapGroupElementTarget> theTargets) {
      TranslationResult retVal = new TranslationResult();

      String msg;
      if (theTargets.isEmpty()) {

         retVal.setResult(VersionConvertor_40_50.convertBoolean(new BooleanType(false)));

         msg = getContext().getLocalizer().getMessage(
            FhirResourceDaoConceptMapR5.class,
            "noMatchesFound");

         retVal.setMessage(VersionConvertor_40_50.convertString(new StringType(msg)));

      } else {

         retVal.setResult(VersionConvertor_40_50.convertBoolean(new BooleanType(true)));

         msg = getContext().getLocalizer().getMessage(
            FhirResourceDaoConceptMapR5.class,
            "matchesFound");

         retVal.setMessage(VersionConvertor_40_50.convertString(new StringType(msg)));

         TranslationMatch translationMatch;
         Set<TermConceptMapGroupElementTarget> targetsToReturn = new HashSet<>();
         for (TermConceptMapGroupElementTarget target : theTargets) {
            if (targetsToReturn.add(target)) {
               translationMatch = new TranslationMatch();

               if (target.getEquivalence() != null) {
                  translationMatch.setEquivalence(VersionConvertor_40_50.convertCode(new CodeType(target.getEquivalence().toCode())));
               }

               translationMatch.setConcept(VersionConvertor_40_50.convertCoding(
                  new Coding()
                     .setCode(target.getCode())
                     .setSystem(target.getSystem())
                     .setVersion(target.getSystemVersion())
                     .setDisplay(target.getDisplay())
               ));

               translationMatch.setSource(VersionConvertor_40_50.convertUri(new UriType(target.getConceptMapUrl())));

               retVal.addMatch(translationMatch);
            }
         }
      }

      return retVal;
   }

   private TranslationResult buildReverseTranslationResult(List<TermConceptMapGroupElement> theElements) {
      TranslationResult retVal = new TranslationResult();

      String msg;
      if (theElements.isEmpty()) {

         retVal.setResult(VersionConvertor_40_50.convertBoolean(new BooleanType(false)));

         msg = getContext().getLocalizer().getMessage(
            FhirResourceDaoConceptMapR5.class,
            "noMatchesFound");

         retVal.setMessage(VersionConvertor_40_50.convertString(new StringType(msg)));

      } else {

         retVal.setResult(VersionConvertor_40_50.convertBoolean(new BooleanType(true)));

         msg = getContext().getLocalizer().getMessage(
            FhirResourceDaoConceptMapR5.class,
            "matchesFound");

         retVal.setMessage(VersionConvertor_40_50.convertString(new StringType(msg)));

         TranslationMatch translationMatch;
         Set<TermConceptMapGroupElement> elementsToReturn = new HashSet<>();
         for (TermConceptMapGroupElement element : theElements) {
            if (elementsToReturn.add(element)) {
               translationMatch = new TranslationMatch();

               translationMatch.setConcept(VersionConvertor_40_50.convertCoding(
                  new Coding()
                     .setCode(element.getCode())
                     .setSystem(element.getSystem())
                     .setVersion(element.getSystemVersion())
                     .setDisplay(element.getDisplay())
               ));

               translationMatch.setSource(VersionConvertor_40_50.convertUri(new UriType(element.getConceptMapUrl())));

               if (element.getConceptMapGroupElementTargets().size() == 1) {

                  ConceptMapEquivalence eq = element.getConceptMapGroupElementTargets().get(0).getEquivalence();
                  if (eq != null) {
                     translationMatch.setEquivalence(VersionConvertor_40_50.convertCode(new CodeType(eq.toCode())));
                  }
               }

               retVal.addMatch(translationMatch);
            }
         }
      }

      return retVal;
      return myTermConceptMappingSvc.translate(theTranslationRequest);
   }

   @Override
@ -178,12 +56,12 @@ public class FhirResourceDaoConceptMapR5 extends BaseHapiFhirResourceDao<Concept

         if (retVal.getDeleted() == null) {
            ConceptMap conceptMap = (ConceptMap) theResource;
            myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, org.hl7.fhir.convertors.conv40_50.ConceptMap40_50.convertConceptMap(conceptMap));
            myTermConceptMappingSvc.storeTermConceptMapAndChildren(retVal, org.hl7.fhir.convertors.conv40_50.ConceptMap40_50.convertConceptMap(conceptMap));
         } else {
            myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
            myTermConceptMappingSvc.deleteConceptMapAndChildren(retVal);
         }
      }

      return retVal;
   }
}
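Reviewer note: the DAO now returns the model-independent ca.uhn.fhir.context.support.TranslateConceptResults rather than the JPA-specific TranslationResult. A minimal sketch of how calling code might consume the new type; the accessor names used here (getResult(), getMessage(), getResults() and the TranslateConceptResult bean) are assumed and should be checked against the HAPI version on the classpath.

import ca.uhn.fhir.context.support.TranslateConceptResult;
import ca.uhn.fhir.context.support.TranslateConceptResults;

public class TranslateConceptResultsPrinter {
   // Assumed accessors: getResult() for the overall boolean outcome, getMessage() for the
   // human-readable summary, getResults() for the individual matches.
   public static void print(TranslateConceptResults theResults) {
      System.out.println("result=" + theResults.getResult() + ", message=" + theResults.getMessage());
      for (TranslateConceptResult next : theResults.getResults()) {
         // Each match carries the mapped code, its system, and the equivalence from the ConceptMap
         System.out.println("  " + next.getSystem() + "|" + next.getCode() + " (" + next.getEquivalence() + ")");
      }
   }
}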
@ -43,6 +43,13 @@ public class TermConceptMap implements Serializable {
   static final int MAX_URL_LENGTH = 200;
   public static final int MAX_VER_LENGTH = 200;

   /**
    * Constructor
    */
   public TermConceptMap() {
      super();
   }

   @Id()
   @SequenceGenerator(name = "SEQ_CONCEPT_MAP_PID", sequenceName = "SEQ_CONCEPT_MAP_PID")
   @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_MAP_PID")
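Reviewer note: MAX_VER_LENGTH mirrors MAX_URL_LENGTH, capping ConceptMap.version at 200 characters in the entity. A hypothetical guard (not part of the entity or of HAPI) illustrating the kind of check a caller could apply before persisting:

public final class ConceptMapVersionGuard {
   private static final int MAX_VER_LENGTH = 200; // mirrors TermConceptMap.MAX_VER_LENGTH

   // Hypothetical helper, not HAPI API: rejects versions that would overflow the column
   static String requireValidVersion(String theVersion) {
      if (theVersion != null && theVersion.length() > MAX_VER_LENGTH) {
         throw new IllegalArgumentException("ConceptMap.version exceeds " + MAX_VER_LENGTH + " characters");
      }
      return theVersion;
   }
}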
@ -129,7 +129,7 @@ public class CascadingDeleteInterceptor {
      IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextSource.getResourceType());

      // Interceptor call: STORAGE_CASCADE_DELETE
      IBaseResource resource = dao.read(nextSource);
      IBaseResource resource = dao.read(nextSource, theRequest);
      HookParams params = new HookParams()
         .add(RequestDetails.class, theRequest)
         .addIfMatchesType(ServletRequestDetails.class, theRequest)
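Reviewer note: passing theRequest into dao.read(...) keeps the cascaded read inside the original request context (interceptors, partition selection, auditing) instead of a synthetic system request. For reference, a client-side sketch of triggering a cascading delete against a server that has this interceptor registered; the base URL and resource id are placeholders, and the X-Cascade header is the convention this interceptor honours:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IdType;

public class CascadingDeleteClientExample {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();
      IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder base URL

      // Ask the server to cascade the delete to resources that reference Patient/123
      client
         .delete()
         .resourceById(new IdType("Patient/123")) // placeholder id
         .withAdditionalHeader("X-Cascade", "delete")
         .execute();
   }
}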
@ -22,8 +22,9 @@ package ca.uhn.fhir.jpa.provider.dstu3;

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
import ca.uhn.fhir.jpa.api.model.TranslationResult;
import ca.uhn.fhir.context.support.TranslateConceptResults;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;

@ -151,10 +152,10 @@ public class BaseJpaResourceProviderConceptMapDstu3 extends JpaResourceProviderD
      startRequest(theServletRequest);
      try {
         IFhirResourceDaoConceptMap<ConceptMap> dao = (IFhirResourceDaoConceptMap<ConceptMap>) getDao();
         TranslationResult result = dao.translate(translationRequest, theRequestDetails);
         TranslateConceptResults result = dao.translate(translationRequest, theRequestDetails);

         // Convert from R4 to DSTU3
         return convertParameters(result.toParameters());
         return convertParameters(TermConceptMappingSvcImpl.toParameters(result));
      } catch (FHIRException fe) {
         throw new InternalErrorException(fe);
      } finally {
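Reviewer note: TermConceptMappingSvcImpl.toParameters(result) takes over from TranslationResult.toParameters() as the bridge to the FHIR $translate response (here converted on to DSTU3). For readers who have not seen that response, a hand-built R4 Parameters with the general shape the operation returns; the codes, display text and URLs are placeholders:

import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.UriType;

public class TranslateResponseShape {
   public static Parameters example() {
      Parameters retVal = new Parameters();
      retVal.addParameter().setName("result").setValue(new BooleanType(true));
      retVal.addParameter().setName("message").setValue(new StringType("Matches found"));

      // One "match" part per mapping found in the ConceptMap
      Parameters.ParametersParameterComponent match = retVal.addParameter().setName("match");
      match.addPart().setName("equivalence").setValue(new CodeType("equal"));
      match.addPart().setName("concept").setValue(new Coding("http://example.org/target-system", "34567", "Example target display"));
      match.addPart().setName("source").setValue(new UriType("http://example.org/fhir/ConceptMap/my-map"));

      return retVal;
   }
}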
@ -22,8 +22,9 @@ package ca.uhn.fhir.jpa.provider.r4;

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
import ca.uhn.fhir.jpa.api.model.TranslationResult;
import ca.uhn.fhir.context.support.TranslateConceptResults;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;

@ -141,8 +142,8 @@ public class BaseJpaResourceProviderConceptMapR4 extends JpaResourceProviderR4<C
      startRequest(theServletRequest);
      try {
         IFhirResourceDaoConceptMap<ConceptMap> dao = (IFhirResourceDaoConceptMap<ConceptMap>) getDao();
         TranslationResult result = dao.translate(translationRequest, theRequestDetails);
         return result.toParameters();
         TranslateConceptResults result = dao.translate(translationRequest, theRequestDetails);
         return TermConceptMappingSvcImpl.toParameters(result);
      } finally {
         endRequest(theServletRequest);
      }
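Reviewer note: from a client's point of view the $translate contract of this provider is unchanged by the switch to TranslateConceptResults. A sketch of invoking the operation with the HAPI generic client; the server base URL, ConceptMap URL, systems and code are placeholders:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.UriType;

public class TranslateOperationExample {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();
      IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder base URL

      // Input parameters as defined by the $translate operation
      Parameters inParams = new Parameters();
      inParams.addParameter().setName("url").setValue(new UriType("http://example.org/fhir/ConceptMap/my-map"));
      inParams.addParameter().setName("system").setValue(new UriType("http://example.org/codesystem/source"));
      inParams.addParameter().setName("code").setValue(new CodeType("12345"));
      inParams.addParameter().setName("targetsystem").setValue(new UriType("http://example.org/codesystem/target"));

      Parameters outParams = client
         .operation()
         .onType(ConceptMap.class)
         .named("$translate")
         .withParameters(inParams)
         .execute();

      // The response carries "result", optionally "message", and one "match" part per mapping
      System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outParams));
   }
}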
@ -22,8 +22,9 @@ package ca.uhn.fhir.jpa.provider.r5;

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
import ca.uhn.fhir.jpa.api.model.TranslationResult;
import ca.uhn.fhir.context.support.TranslateConceptResults;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;

@ -142,8 +143,8 @@ public class BaseJpaResourceProviderConceptMapR5 extends JpaResourceProviderR5<C
      startRequest(theServletRequest);
      try {
         IFhirResourceDaoConceptMap<ConceptMap> dao = (IFhirResourceDaoConceptMap<ConceptMap>) getDao();
         TranslationResult result = dao.translate(translationRequest, theRequestDetails);
         org.hl7.fhir.r4.model.Parameters parameters = result.toParameters();
         TranslateConceptResults result = dao.translate(translationRequest, theRequestDetails);
         org.hl7.fhir.r4.model.Parameters parameters = TermConceptMappingSvcImpl.toParameters(result);
         return org.hl7.fhir.convertors.conv40_50.Parameters40_50.convertParameters(parameters);
      } finally {
         endRequest(theServletRequest);
|
|
@ -29,18 +29,12 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
|||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IDao;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationQuery;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
|
||||
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapGroupDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapGroupElementDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapGroupElementTargetDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao;
|
||||
|
@ -51,10 +45,6 @@ import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
|||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMap;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroup;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptProperty;
|
||||
|
@ -72,12 +62,12 @@ import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
|
|||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.term.ex.ExpansionTooCostlyException;
|
||||
import ca.uhn.fhir.jpa.util.LogicUtil;
|
||||
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
|
@ -102,8 +92,6 @@ import org.apache.commons.lang3.Validate;
|
|||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.RegexpQuery;
|
||||
import org.hibernate.ScrollMode;
|
||||
import org.hibernate.ScrollableResults;
|
||||
import org.hibernate.search.backend.elasticsearch.ElasticsearchExtension;
|
||||
import org.hibernate.search.backend.lucene.LuceneExtension;
|
||||
import org.hibernate.search.engine.search.predicate.dsl.BooleanPredicateClausesStep;
|
||||
|
@ -114,7 +102,6 @@ import org.hibernate.search.mapper.orm.Search;
|
|||
import org.hibernate.search.mapper.orm.session.SearchSession;
|
||||
import org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService;
|
||||
import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport;
|
||||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
import org.hl7.fhir.instance.model.api.IAnyResource;
|
||||
import org.hl7.fhir.instance.model.api.IBaseCoding;
|
||||
import org.hl7.fhir.instance.model.api.IBaseDatatype;
|
||||
|
@ -126,11 +113,9 @@ import org.hl7.fhir.r4.model.CanonicalType;
|
|||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.DomainResource;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.Extension;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.IntegerType;
|
||||
import org.hl7.fhir.r4.model.StringType;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
@ -139,7 +124,6 @@ import org.quartz.JobExecutionContext;
|
|||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
|
@ -170,7 +154,6 @@ import java.util.Collection;
|
|||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -199,10 +182,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseTermReadSvcImpl.class);
|
||||
private static final ValueSetExpansionOptions DEFAULT_EXPANSION_OPTIONS = new ValueSetExpansionOptions();
|
||||
private static final TermCodeSystemVersion NO_CURRENT_VERSION = new TermCodeSystemVersion().setId(-1L);
|
||||
private static boolean ourLastResultsFromTranslationCache; // For testing.
|
||||
private static boolean ourLastResultsFromTranslationWithReverseCache; // For testing.
|
||||
private static Runnable myInvokeOnNextCallForUnitTest;
|
||||
private final int myFetchSize = DEFAULT_FETCH_SIZE;
|
||||
private final Cache<String, TermCodeSystemVersion> myCodeSystemCurrentVersionCache = Caffeine.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).build();
|
||||
@Autowired
|
||||
protected DaoRegistry myDaoRegistry;
|
||||
|
@ -211,14 +191,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
@Autowired
|
||||
protected ITermConceptDao myConceptDao;
|
||||
@Autowired
|
||||
protected ITermConceptMapDao myConceptMapDao;
|
||||
@Autowired
|
||||
protected ITermConceptMapGroupDao myConceptMapGroupDao;
|
||||
@Autowired
|
||||
protected ITermConceptMapGroupElementDao myConceptMapGroupElementDao;
|
||||
@Autowired
|
||||
protected ITermConceptMapGroupElementTargetDao myConceptMapGroupElementTargetDao;
|
||||
@Autowired
|
||||
protected ITermConceptPropertyDao myConceptPropertyDao;
|
||||
@Autowired
|
||||
protected ITermConceptDesignationDao myConceptDesignationDao;
|
||||
|
@ -236,8 +208,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
private ITermCodeSystemVersionDao myCodeSystemVersionDao;
|
||||
@Autowired
|
||||
private DaoConfig myDaoConfig;
|
||||
private Cache<TranslationQuery, List<TermConceptMapGroupElementTarget>> myTranslationCache;
|
||||
private Cache<TranslationQuery, List<TermConceptMapGroupElement>> myTranslationWithReverseCache;
|
||||
private TransactionTemplate myTxTemplate;
|
||||
@Autowired
|
||||
private PlatformTransactionManager myTransactionManager;
|
||||
|
@ -257,6 +227,9 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
private ITermCodeSystemStorageSvc myConceptStorageSvc;
|
||||
@Autowired
|
||||
private ApplicationContext myApplicationContext;
|
||||
@Autowired
|
||||
private ITermConceptMappingSvc myTermConceptMappingSvc;
|
||||
|
||||
private volatile IValidationSupport myJpaValidationSupport;
|
||||
private volatile IValidationSupport myValidationSupport;
|
||||
|
||||
|
@ -351,44 +324,9 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
*/
|
||||
@VisibleForTesting
|
||||
public void clearCaches() {
|
||||
myTranslationCache.invalidateAll();
|
||||
myTranslationWithReverseCache.invalidateAll();
|
||||
myCodeSystemCurrentVersionCache.invalidateAll();
|
||||
}
|
||||
|
||||
public void deleteConceptMap(ResourceTable theResourceTable) {
|
||||
// Get existing entity so it can be deleted.
|
||||
Optional<TermConceptMap> optionalExistingTermConceptMapById = myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId());
|
||||
|
||||
if (optionalExistingTermConceptMapById.isPresent()) {
|
||||
TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get();
|
||||
|
||||
ourLog.info("Deleting existing TermConceptMap[{}] and its children...", existingTermConceptMap.getId());
|
||||
for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) {
|
||||
|
||||
for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) {
|
||||
|
||||
for (TermConceptMapGroupElementTarget target : element.getConceptMapGroupElementTargets()) {
|
||||
|
||||
myConceptMapGroupElementTargetDao.deleteTermConceptMapGroupElementTargetById(target.getId());
|
||||
}
|
||||
|
||||
myConceptMapGroupElementDao.deleteTermConceptMapGroupElementById(element.getId());
|
||||
}
|
||||
|
||||
myConceptMapGroupDao.deleteTermConceptMapGroupById(group.getId());
|
||||
}
|
||||
|
||||
myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId());
|
||||
ourLog.info("Done deleting existing TermConceptMap[{}] and its children.", existingTermConceptMap.getId());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public void deleteConceptMapAndChildren(ResourceTable theResourceTable) {
|
||||
deleteConceptMap(theResourceTable);
|
||||
}
|
||||
|
||||
public void deleteValueSetForResource(ResourceTable theResourceTable) {
|
||||
// Get existing entity so it can be deleted.
|
||||
|
@ -533,13 +471,12 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
      theAccumulator.addMessage(msg);
      if (isOracleDialect()) {
         expandConceptsOracle(theAccumulator, termValueSet, theFilter, theAdd);
      }
      else {
      } else {
         expandConcepts(theAccumulator, termValueSet, theFilter, theAdd);
      }
   }

   private boolean isOracleDialect(){
   private boolean isOracleDialect() {
      return myHibernatePropertiesProvider.getDialect() instanceof org.hibernate.dialect.Oracle12cDialect;
   }

@ -794,7 +731,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
      }

      // Allow to search by the end of the phrase. E.g. "working proficiency" will match "Limited working proficiency"
      for (int start = 0; start <= tokens.size() - 1; ++ start) {
      for (int start = 0; start <= tokens.size() - 1; ++start) {
         for (int end = start + 1; end <= tokens.size(); ++end) {
            String sublist = String.join(" ", tokens.subList(start, end));
            if (startsWithIgnoreCase(sublist, theFilterDisplay))

@ -1130,14 +1067,14 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
      SearchQuery<TermConcept> termConceptsQuery = searchSession.search(TermConcept.class)
         .where(f -> finishedQuery).toQuery();

      System.out.println("About to query:" + termConceptsQuery.queryString());
      System.out.println("About to query:" + termConceptsQuery.queryString());
      List<TermConcept> termConcepts = termConceptsQuery.fetchHits(theQueryIndex * maxResultsPerBatch, maxResultsPerBatch);

      int resultsInBatch = termConcepts.size();
      int firstResult = theQueryIndex * maxResultsPerBatch;// TODO GGG HS we lose the ability to check the index of the first result, so just best-guessing it here.
      int delta = 0;
      for (TermConcept concept: termConcepts) {
      for (TermConcept concept : termConcepts) {
         count.incrementAndGet();
         countForBatch.incrementAndGet();
         if (theAdd && expansionStep != null) {

@ -1455,7 +1392,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
   }

   private void addDisplayFilterInexact(SearchPredicateFactory f, BooleanPredicateClausesStep<?> bool, ValueSet.ConceptSetFilterComponent nextFilter) {
      bool.must(f.phrase()
         .field("myDisplay").boost(4.0f)

@ -1489,7 +1425,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
            addLoincFilterDescendantEqual(theSystem, f, b, theFilter);
            break;
         case IN:
            addLoincFilterDescendantIn(theSystem, f,b , theFilter);
            addLoincFilterDescendantIn(theSystem, f, b, theFilter);
            break;
         default:
            throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty());

@ -1545,7 +1481,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
   }

   private void logFilteringValueOnProperty(String theValue, String theProperty) {
      ourLog.debug(" * Filtering with value={} on property {}", theValue, theProperty);
   }

@ -1859,26 +1794,9 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
      RuleBasedTransactionAttribute rules = new RuleBasedTransactionAttribute();
      rules.getRollbackRules().add(new NoRollbackRuleAttribute(ExpansionTooCostlyException.class));
      myTxTemplate = new TransactionTemplate(myTransactionManager, rules);
      buildTranslationCaches();
      scheduleJob();
   }

   private void buildTranslationCaches() {
      Long timeout = myDaoConfig.getTranslationCachesExpireAfterWriteInMinutes();

      myTranslationCache =
         Caffeine.newBuilder()
            .maximumSize(10000)
            .expireAfterWrite(timeout, TimeUnit.MINUTES)
            .build();

      myTranslationWithReverseCache =
         Caffeine.newBuilder()
            .maximumSize(10000)
            .expireAfterWrite(timeout, TimeUnit.MINUTES)
            .build();
   }

   public void scheduleJob() {
      // Register scheduled job to pre-expand ValueSets
      // In the future it would be great to make this a cluster-aware task somehow
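Reviewer note: the Caffeine-backed translation caches and their buildTranslationCaches() factory leave BaseTermReadSvcImpl; the new TermConceptMappingSvcImpl relies on MemoryCacheService instead. For readers unfamiliar with the removed code, a standalone sketch of the kind of cache it built; the 10,000-entry cap matches the removed code, while the key/value types and timeout here are illustrative only:

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;

public class TranslationCacheSketch {
   public static void main(String[] args) {
      long timeoutMinutes = 60; // the removed code read this from DaoConfig.getTranslationCachesExpireAfterWriteInMinutes()

      Cache<String, List<String>> translationCache = Caffeine.newBuilder()
         .maximumSize(10000)
         .expireAfterWrite(timeoutMinutes, TimeUnit.MINUTES)
         .build();

      // get(key, mappingFunction) computes and caches on a miss, which is how the removed
      // translate() methods populated myTranslationCache per TranslationQuery
      List<String> targets = translationCache.get("http://example.org/src|12345",
         k -> Collections.singletonList("http://example.org/tgt|67890"));
      System.out.println(targets);
   }
}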
@ -1888,163 +1806,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
mySchedulerService.scheduleClusteredJob(10 * DateUtils.MILLIS_PER_MINUTE, vsJobDefinition);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public void storeTermConceptMapAndChildren(ResourceTable theResourceTable, ConceptMap theConceptMap) {
|
||||
|
||||
ValidateUtil.isTrueOrThrowInvalidRequest(theResourceTable != null, "No resource supplied");
|
||||
if (isPlaceholder(theConceptMap)) {
|
||||
ourLog.info("Not storing TermConceptMap for placeholder {}", theConceptMap.getIdElement().toVersionless().getValueAsString());
|
||||
return;
|
||||
}
|
||||
|
||||
ValidateUtil.isNotBlankOrThrowUnprocessableEntity(theConceptMap.getUrl(), "ConceptMap has no value for ConceptMap.url");
|
||||
ourLog.info("Storing TermConceptMap for {}", theConceptMap.getIdElement().toVersionless().getValueAsString());
|
||||
|
||||
TermConceptMap termConceptMap = new TermConceptMap();
|
||||
termConceptMap.setResource(theResourceTable);
|
||||
termConceptMap.setUrl(theConceptMap.getUrl());
|
||||
termConceptMap.setVersion(theConceptMap.getVersion());
|
||||
|
||||
String source = theConceptMap.hasSourceUriType() ? theConceptMap.getSourceUriType().getValueAsString() : null;
|
||||
String target = theConceptMap.hasTargetUriType() ? theConceptMap.getTargetUriType().getValueAsString() : null;
|
||||
|
||||
/*
|
||||
* If this is a mapping between "resources" instead of purely between
|
||||
* "concepts" (this is a weird concept that is technically possible, at least as of
|
||||
* FHIR R4), don't try to store the mappings.
|
||||
*
|
||||
* See here for a description of what that is:
|
||||
* http://hl7.org/fhir/conceptmap.html#bnr
|
||||
*/
|
||||
if ("StructureDefinition".equals(new IdType(source).getResourceType()) ||
|
||||
"StructureDefinition".equals(new IdType(target).getResourceType())) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (source == null && theConceptMap.hasSourceCanonicalType()) {
|
||||
source = theConceptMap.getSourceCanonicalType().getValueAsString();
|
||||
}
|
||||
if (target == null && theConceptMap.hasTargetCanonicalType()) {
|
||||
target = theConceptMap.getTargetCanonicalType().getValueAsString();
|
||||
}
|
||||
|
||||
/*
|
||||
* For now we always delete old versions. At some point, it would be nice to allow configuration to keep old versions.
|
||||
*/
|
||||
deleteConceptMap(theResourceTable);
|
||||
|
||||
/*
|
||||
* Do the upload.
|
||||
*/
|
||||
String conceptMapUrl = termConceptMap.getUrl();
|
||||
String conceptMapVersion = termConceptMap.getVersion();
|
||||
Optional<TermConceptMap> optionalExistingTermConceptMapByUrl;
|
||||
if (isBlank(conceptMapVersion)) {
|
||||
optionalExistingTermConceptMapByUrl = myConceptMapDao.findTermConceptMapByUrlAndNullVersion(conceptMapUrl);
|
||||
} else {
|
||||
optionalExistingTermConceptMapByUrl = myConceptMapDao.findTermConceptMapByUrlAndVersion(conceptMapUrl, conceptMapVersion);
|
||||
}
|
||||
if (!optionalExistingTermConceptMapByUrl.isPresent()) {
|
||||
try {
|
||||
if (isNotBlank(source)) {
|
||||
termConceptMap.setSource(source);
|
||||
}
|
||||
if (isNotBlank(target)) {
|
||||
termConceptMap.setTarget(target);
|
||||
}
|
||||
} catch (FHIRException fe) {
|
||||
throw new InternalErrorException(fe);
|
||||
}
|
||||
termConceptMap = myConceptMapDao.save(termConceptMap);
|
||||
int codesSaved = 0;
|
||||
|
||||
if (theConceptMap.hasGroup()) {
|
||||
TermConceptMapGroup termConceptMapGroup;
|
||||
for (ConceptMap.ConceptMapGroupComponent group : theConceptMap.getGroup()) {
|
||||
|
||||
String groupSource = group.getSource();
|
||||
if (isBlank(groupSource)) {
|
||||
groupSource = source;
|
||||
}
|
||||
if (isBlank(groupSource)) {
|
||||
throw new UnprocessableEntityException("ConceptMap[url='" + theConceptMap.getUrl() + "'] contains at least one group without a value in ConceptMap.group.source");
|
||||
}
|
||||
|
||||
String groupTarget = group.getTarget();
|
||||
if (isBlank(groupTarget)) {
|
||||
groupTarget = target;
|
||||
}
|
||||
if (isBlank(groupTarget)) {
|
||||
throw new UnprocessableEntityException("ConceptMap[url='" + theConceptMap.getUrl() + "'] contains at least one group without a value in ConceptMap.group.target");
|
||||
}
|
||||
|
||||
termConceptMapGroup = new TermConceptMapGroup();
|
||||
termConceptMapGroup.setConceptMap(termConceptMap);
|
||||
termConceptMapGroup.setSource(groupSource);
|
||||
termConceptMapGroup.setSourceVersion(group.getSourceVersion());
|
||||
termConceptMapGroup.setTarget(groupTarget);
|
||||
termConceptMapGroup.setTargetVersion(group.getTargetVersion());
|
||||
myConceptMapGroupDao.save(termConceptMapGroup);
|
||||
|
||||
if (group.hasElement()) {
|
||||
TermConceptMapGroupElement termConceptMapGroupElement;
|
||||
for (ConceptMap.SourceElementComponent element : group.getElement()) {
|
||||
if (isBlank(element.getCode())) {
|
||||
continue;
|
||||
}
|
||||
termConceptMapGroupElement = new TermConceptMapGroupElement();
|
||||
termConceptMapGroupElement.setConceptMapGroup(termConceptMapGroup);
|
||||
termConceptMapGroupElement.setCode(element.getCode());
|
||||
termConceptMapGroupElement.setDisplay(element.getDisplay());
|
||||
myConceptMapGroupElementDao.save(termConceptMapGroupElement);
|
||||
|
||||
if (element.hasTarget()) {
|
||||
TermConceptMapGroupElementTarget termConceptMapGroupElementTarget;
|
||||
for (ConceptMap.TargetElementComponent elementTarget : element.getTarget()) {
|
||||
if (isBlank(elementTarget.getCode())) {
|
||||
continue;
|
||||
}
|
||||
termConceptMapGroupElementTarget = new TermConceptMapGroupElementTarget();
|
||||
termConceptMapGroupElementTarget.setConceptMapGroupElement(termConceptMapGroupElement);
|
||||
termConceptMapGroupElementTarget.setCode(elementTarget.getCode());
|
||||
termConceptMapGroupElementTarget.setDisplay(elementTarget.getDisplay());
|
||||
termConceptMapGroupElementTarget.setEquivalence(elementTarget.getEquivalence());
|
||||
myConceptMapGroupElementTargetDao.save(termConceptMapGroupElementTarget);
|
||||
|
||||
if (++codesSaved % 250 == 0) {
|
||||
ourLog.info("Have saved {} codes in ConceptMap", codesSaved);
|
||||
myConceptMapGroupElementTargetDao.flush();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapByUrl.get();
|
||||
|
||||
if (isBlank(conceptMapVersion)) {
|
||||
String msg = myContext.getLocalizer().getMessage(
|
||||
BaseTermReadSvcImpl.class,
|
||||
"cannotCreateDuplicateConceptMapUrl",
|
||||
conceptMapUrl,
|
||||
existingTermConceptMap.getResource().getIdDt().toUnqualifiedVersionless().getValue());
|
||||
throw new UnprocessableEntityException(msg);
|
||||
|
||||
} else {
|
||||
String msg = myContext.getLocalizer().getMessage(
|
||||
BaseTermReadSvcImpl.class,
|
||||
"cannotCreateDuplicateConceptMapUrlAndVersion",
|
||||
conceptMapUrl, conceptMapVersion,
|
||||
existingTermConceptMap.getResource().getIdDt().toUnqualifiedVersionless().getValue());
|
||||
throw new UnprocessableEntityException(msg);
|
||||
}
|
||||
}
|
||||
|
||||
ourLog.info("Done storing TermConceptMap[{}] for {}", termConceptMap.getId(), theConceptMap.getIdElement().toVersionless().getValueAsString());
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void preExpandDeferredValueSetsToTerminologyTables() {
|
||||
|
@ -2244,15 +2005,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
}
|
||||
}
|
||||
|
||||
private boolean isPlaceholder(DomainResource theResource) {
|
||||
boolean retVal = false;
|
||||
Extension extension = theResource.getExtensionByUrl(HapiExtensions.EXT_RESOURCE_PLACEHOLDER);
|
||||
if (extension != null && extension.hasValue() && extension.getValue() instanceof BooleanType) {
|
||||
retVal = ((BooleanType) extension.getValue()).booleanValue();
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public IFhirResourceDaoCodeSystem.SubsumesResult subsumes(IPrimitiveType<String> theCodeA, IPrimitiveType<String> theCodeB,
|
||||
|
@ -2362,7 +2114,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
private ArrayList<FhirVersionIndependentConcept> toVersionIndependentConcepts(String theSystem, Set<TermConcept> codes) {
|
||||
ArrayList<FhirVersionIndependentConcept> retVal = new ArrayList<>(codes.size());
|
||||
for (TermConcept next : codes) {
|
||||
|
@ -2371,254 +2122,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
public List<TermConceptMapGroupElementTarget> translate(TranslationRequest theTranslationRequest) {
|
||||
List<TermConceptMapGroupElementTarget> retVal = new ArrayList<>();
|
||||
|
||||
CriteriaBuilder criteriaBuilder = myEntityManager.getCriteriaBuilder();
|
||||
CriteriaQuery<TermConceptMapGroupElementTarget> query = criteriaBuilder.createQuery(TermConceptMapGroupElementTarget.class);
|
||||
Root<TermConceptMapGroupElementTarget> root = query.from(TermConceptMapGroupElementTarget.class);
|
||||
|
||||
Join<TermConceptMapGroupElementTarget, TermConceptMapGroupElement> elementJoin = root.join("myConceptMapGroupElement");
|
||||
Join<TermConceptMapGroupElement, TermConceptMapGroup> groupJoin = elementJoin.join("myConceptMapGroup");
|
||||
Join<TermConceptMapGroup, TermConceptMap> conceptMapJoin = groupJoin.join("myConceptMap");
|
||||
|
||||
List<TranslationQuery> translationQueries = theTranslationRequest.getTranslationQueries();
|
||||
List<TermConceptMapGroupElementTarget> cachedTargets;
|
||||
ArrayList<Predicate> predicates;
|
||||
Coding coding;
|
||||
|
||||
//-- get the latest ConceptMapVersion if theTranslationRequest has ConceptMap url but no ConceptMap version
|
||||
String latestConceptMapVersion = null;
|
||||
if (theTranslationRequest.hasUrl() && !theTranslationRequest.hasConceptMapVersion())
|
||||
latestConceptMapVersion = getLatestConceptMapVersion(theTranslationRequest);
|
||||
|
||||
for (TranslationQuery translationQuery : translationQueries) {
|
||||
cachedTargets = myTranslationCache.getIfPresent(translationQuery);
|
||||
if (cachedTargets == null) {
|
||||
final List<TermConceptMapGroupElementTarget> targets = new ArrayList<>();
|
||||
|
||||
predicates = new ArrayList<>();
|
||||
|
||||
coding = translationQuery.getCoding();
|
||||
if (coding.hasCode()) {
|
||||
predicates.add(criteriaBuilder.equal(elementJoin.get("myCode"), coding.getCode()));
|
||||
} else {
|
||||
throw new InvalidRequestException("A code must be provided for translation to occur.");
|
||||
}
|
||||
|
||||
if (coding.hasSystem()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), coding.getSystem()));
|
||||
}
|
||||
|
||||
if (coding.hasVersion()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("mySourceVersion"), coding.getVersion()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasTargetSystem()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), translationQuery.getTargetSystem().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasUrl()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl().getValueAsString()));
|
||||
if (translationQuery.hasConceptMapVersion()) {
|
||||
// both url and conceptMapVersion
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion().getValueAsString()));
|
||||
} else {
|
||||
if (StringUtils.isNotBlank(latestConceptMapVersion)) {
|
||||
// only url and use latestConceptMapVersion
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion));
|
||||
} else {
|
||||
predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (translationQuery.hasSource()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getSource().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasTarget()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getTarget().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasResourceId()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), translationQuery.getResourceId()));
|
||||
}
|
||||
|
||||
Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0]));
|
||||
query.where(outerPredicate);
|
||||
|
||||
// Use scrollable results.
|
||||
final TypedQuery<TermConceptMapGroupElementTarget> typedQuery = myEntityManager.createQuery(query.select(root));
|
||||
org.hibernate.query.Query<TermConceptMapGroupElementTarget> hibernateQuery = (org.hibernate.query.Query<TermConceptMapGroupElementTarget>) typedQuery;
|
||||
hibernateQuery.setFetchSize(myFetchSize);
|
||||
ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
|
||||
try (ScrollableResultsIterator<TermConceptMapGroupElementTarget> scrollableResultsIterator = new ScrollableResultsIterator<>(scrollableResults)) {
|
||||
|
||||
while (scrollableResultsIterator.hasNext()) {
|
||||
targets.add(scrollableResultsIterator.next());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
ourLastResultsFromTranslationCache = false; // For testing.
|
||||
myTranslationCache.get(translationQuery, k -> targets);
|
||||
retVal.addAll(targets);
|
||||
} else {
|
||||
ourLastResultsFromTranslationCache = true; // For testing.
|
||||
retVal.addAll(cachedTargets);
|
||||
}
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
public List<TermConceptMapGroupElement> translateWithReverse(TranslationRequest theTranslationRequest) {
|
||||
List<TermConceptMapGroupElement> retVal = new ArrayList<>();
|
||||
|
||||
CriteriaBuilder criteriaBuilder = myEntityManager.getCriteriaBuilder();
|
||||
CriteriaQuery<TermConceptMapGroupElement> query = criteriaBuilder.createQuery(TermConceptMapGroupElement.class);
|
||||
Root<TermConceptMapGroupElement> root = query.from(TermConceptMapGroupElement.class);
|
||||
|
||||
Join<TermConceptMapGroupElement, TermConceptMapGroupElementTarget> targetJoin = root.join("myConceptMapGroupElementTargets");
|
||||
Join<TermConceptMapGroupElement, TermConceptMapGroup> groupJoin = root.join("myConceptMapGroup");
|
||||
Join<TermConceptMapGroup, TermConceptMap> conceptMapJoin = groupJoin.join("myConceptMap");
|
||||
|
||||
List<TranslationQuery> translationQueries = theTranslationRequest.getTranslationQueries();
|
||||
List<TermConceptMapGroupElement> cachedElements;
|
||||
ArrayList<Predicate> predicates;
|
||||
Coding coding;
|
||||
|
||||
//-- get the latest ConceptMapVersion if theTranslationRequest has ConceptMap url but no ConceptMap version
|
||||
String latestConceptMapVersion = null;
|
||||
if (theTranslationRequest.hasUrl() && !theTranslationRequest.hasConceptMapVersion())
|
||||
latestConceptMapVersion = getLatestConceptMapVersion(theTranslationRequest);
|
||||
|
||||
for (TranslationQuery translationQuery : translationQueries) {
|
||||
cachedElements = myTranslationWithReverseCache.getIfPresent(translationQuery);
|
||||
if (cachedElements == null) {
|
||||
final List<TermConceptMapGroupElement> elements = new ArrayList<>();
|
||||
|
||||
predicates = new ArrayList<>();
|
||||
|
||||
coding = translationQuery.getCoding();
|
||||
String targetCode;
|
||||
String targetCodeSystem = null;
|
||||
if (coding.hasCode()) {
|
||||
predicates.add(criteriaBuilder.equal(targetJoin.get("myCode"), coding.getCode()));
|
||||
targetCode = coding.getCode();
|
||||
} else {
|
||||
throw new InvalidRequestException("A code must be provided for translation to occur.");
|
||||
}
|
||||
|
||||
if (coding.hasSystem()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), coding.getSystem()));
|
||||
targetCodeSystem = coding.getSystem();
|
||||
}
|
||||
|
||||
if (coding.hasVersion()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("myTargetVersion"), coding.getVersion()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasUrl()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl().getValueAsString()));
|
||||
if (translationQuery.hasConceptMapVersion()) {
|
||||
// both url and conceptMapVersion
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion().getValueAsString()));
|
||||
} else {
|
||||
if (StringUtils.isNotBlank(latestConceptMapVersion)) {
|
||||
// only url and use latestConceptMapVersion
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion));
|
||||
} else {
|
||||
predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (translationQuery.hasTargetSystem()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), translationQuery.getTargetSystem().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasSource()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getSource().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasTarget()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getTarget().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasResourceId()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), translationQuery.getResourceId()));
|
||||
}
|
||||
|
||||
Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0]));
|
||||
query.where(outerPredicate);
|
||||
|
||||
// Use scrollable results.
|
||||
final TypedQuery<TermConceptMapGroupElement> typedQuery = myEntityManager.createQuery(query.select(root));
|
||||
org.hibernate.query.Query<TermConceptMapGroupElement> hibernateQuery = (org.hibernate.query.Query<TermConceptMapGroupElement>) typedQuery;
|
||||
hibernateQuery.setFetchSize(myFetchSize);
|
||||
ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
|
||||
try (ScrollableResultsIterator<TermConceptMapGroupElement> scrollableResultsIterator = new ScrollableResultsIterator<>(scrollableResults)) {
|
||||
|
||||
while (scrollableResultsIterator.hasNext()) {
|
||||
TermConceptMapGroupElement nextElement = scrollableResultsIterator.next();
|
||||
// TODO: The invocation of the size() below does not seem to be necessary but for some reason, removing it causes tests in TerminologySvcImplR4Test to fail.
|
||||
nextElement.getConceptMapGroupElementTargets().size();
|
||||
myEntityManager.detach(nextElement);
|
||||
|
||||
if (isNotBlank(targetCode) && isNotBlank(targetCodeSystem)) {
|
||||
for (Iterator<TermConceptMapGroupElementTarget> iter = nextElement.getConceptMapGroupElementTargets().iterator(); iter.hasNext(); ) {
|
||||
TermConceptMapGroupElementTarget next = iter.next();
|
||||
if (StringUtils.equals(targetCodeSystem, next.getSystem())) {
|
||||
if (StringUtils.equals(targetCode, next.getCode())) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
iter.remove();
|
||||
}
|
||||
}
|
||||
|
||||
elements.add(nextElement);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
ourLastResultsFromTranslationWithReverseCache = false; // For testing.
|
||||
myTranslationWithReverseCache.get(translationQuery, k -> elements);
|
||||
retVal.addAll(elements);
|
||||
} else {
|
||||
ourLastResultsFromTranslationWithReverseCache = true; // For testing.
|
||||
retVal.addAll(cachedElements);
|
||||
}
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
void throwInvalidValueSet(String theValueSet) {
|
||||
throw new ResourceNotFoundException("Unknown ValueSet: " + UrlUtil.escapeUrlParam(theValueSet));
|
||||
}
|
||||
|
||||
// Special case for the translate operation with url and without
|
||||
// conceptMapVersion, find the latest conecptMapVersion
|
||||
private String getLatestConceptMapVersion(TranslationRequest theTranslationRequest) {
|
||||
|
||||
Pageable page = PageRequest.of(0, 1);
|
||||
List<TermConceptMap> theConceptMapList = myConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page,
|
||||
theTranslationRequest.getUrl().asStringValue());
|
||||
if (!theConceptMapList.isEmpty()) {
|
||||
return theConceptMapList.get(0).getVersion();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) {
|
||||
|
@ -2660,7 +2163,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
return createFailureCodeValidationResult(theCodeSystem, theCode);
|
||||
}
|
||||
|
||||
|
||||
IValidationSupport.CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theValidationOptions, String theValueSetUrl, String theCodeSystem, String theCode, String theDisplay) {
|
||||
IBaseResource valueSet = theValidationSupportContext.getRootValidationSupport().fetchValueSet(theValueSetUrl);
|
||||
|
||||
|
@ -2963,6 +2465,15 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
      }
   }

   static boolean isPlaceholder(DomainResource theResource) {
      boolean retVal = false;
      Extension extension = theResource.getExtensionByUrl(HapiExtensions.EXT_RESOURCE_PLACEHOLDER);
      if (extension != null && extension.hasValue() && extension.getValue() instanceof BooleanType) {
         retVal = ((BooleanType) extension.getValue()).booleanValue();
      }
      return retVal;
   }

   /**
    * This is only used for unit tests to test failure conditions
    */
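Reviewer note: isPlaceholder(...) becomes static and package-visible so TermConceptMappingSvcImpl can reuse it via a static import. A small sketch of the behaviour it checks, mirroring the logic above; it assumes the R4 model classes and that HapiExtensions lives in ca.uhn.fhir.util:

import ca.uhn.fhir.util.HapiExtensions;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.Extension;

public class PlaceholderCheckExample {
   public static void main(String[] args) {
      // Auto-created reference targets are tagged with this extension by the JPA server
      ConceptMap map = new ConceptMap();
      Extension placeholderExt = new Extension();
      placeholderExt.setUrl(HapiExtensions.EXT_RESOURCE_PLACEHOLDER);
      placeholderExt.setValue(new BooleanType(true));
      map.addExtension(placeholderExt);

      // Same check as isPlaceholder(): extension present, has a value, and that value is boolean true
      Extension extension = map.getExtensionByUrl(HapiExtensions.EXT_RESOURCE_PLACEHOLDER);
      boolean placeholder = extension != null && extension.hasValue()
         && extension.getValue() instanceof BooleanType
         && ((BooleanType) extension.getValue()).booleanValue();

      System.out.println("placeholder=" + placeholder); // prints placeholder=true
   }
}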
@ -3040,35 +2551,4 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
      return termConcept;
   }

   /**
    * This method is present only for unit tests, do not call from client code
    */
   @VisibleForTesting
   public static void clearOurLastResultsFromTranslationCache() {
      ourLastResultsFromTranslationCache = false;
   }

   /**
    * This method is present only for unit tests, do not call from client code
    */
   @VisibleForTesting
   public static void clearOurLastResultsFromTranslationWithReverseCache() {
      ourLastResultsFromTranslationWithReverseCache = false;
   }

   /**
    * This method is present only for unit tests, do not call from client code
    */
   @VisibleForTesting
   static boolean isOurLastResultsFromTranslationCache() {
      return ourLastResultsFromTranslationCache;
   }

   /**
    * This method is present only for unit tests, do not call from client code
    */
   @VisibleForTesting
   static boolean isOurLastResultsFromTranslationWithReverseCache() {
      return ourLastResultsFromTranslationWithReverseCache;
   }
}

@ -0,0 +1,698 @@
package ca.uhn.fhir.jpa.term;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResult;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationQuery;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapGroupDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapGroupElementDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapGroupElementTargetDao;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMap;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroup;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
||||
import ca.uhn.fhir.jpa.util.MemoryCacheService;
|
||||
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.hibernate.ScrollMode;
|
||||
import org.hibernate.ScrollableResults;
|
||||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
import org.hl7.fhir.r4.model.BooleanType;
|
||||
import org.hl7.fhir.r4.model.CodeType;
|
||||
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.Parameters;
|
||||
import org.hl7.fhir.r4.model.StringType;
|
||||
import org.hl7.fhir.r4.model.UriType;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.transaction.annotation.Propagation;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
import javax.persistence.PersistenceContextType;
|
||||
import javax.persistence.TypedQuery;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
import javax.persistence.criteria.Join;
|
||||
import javax.persistence.criteria.Predicate;
|
||||
import javax.persistence.criteria.Root;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
import static ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.isPlaceholder;
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc {
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(TermConceptMappingSvcImpl.class);
|
||||
private static boolean ourLastResultsFromTranslationCache; // For testing.
|
||||
private static boolean ourLastResultsFromTranslationWithReverseCache; // For testing.
|
||||
private final int myFetchSize = BaseTermReadSvcImpl.DEFAULT_FETCH_SIZE;
|
||||
@Autowired
|
||||
protected ITermConceptMapDao myConceptMapDao;
|
||||
@Autowired
|
||||
protected ITermConceptMapGroupDao myConceptMapGroupDao;
|
||||
@Autowired
|
||||
protected ITermConceptMapGroupElementDao myConceptMapGroupElementDao;
|
||||
@Autowired
|
||||
protected ITermConceptMapGroupElementTargetDao myConceptMapGroupElementTargetDao;
|
||||
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
|
||||
protected EntityManager myEntityManager;
|
||||
@Autowired
|
||||
private FhirContext myContext;
|
||||
@Autowired
|
||||
private MemoryCacheService myMemoryCacheService;
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public void deleteConceptMapAndChildren(ResourceTable theResourceTable) {
|
||||
deleteConceptMap(theResourceTable);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FhirContext getFhirContext() {
|
||||
return myContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public TranslateConceptResults translateConcept(TranslateCodeRequest theRequest) {
|
||||
|
||||
CodeableConcept sourceCodeableConcept = new CodeableConcept();
|
||||
sourceCodeableConcept
|
||||
.addCoding()
|
||||
.setSystem(theRequest.getSourceSystemUrl())
|
||||
.setCode(theRequest.getSourceCode());
|
||||
|
||||
TranslationRequest request = new TranslationRequest();
|
||||
request.setCodeableConcept(sourceCodeableConcept);
|
||||
request.setTargetSystem(new UriType(theRequest.getTargetSystemUrl()));
|
||||
|
||||
return translate(request);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public void storeTermConceptMapAndChildren(ResourceTable theResourceTable, ConceptMap theConceptMap) {
|
||||
|
||||
ValidateUtil.isTrueOrThrowInvalidRequest(theResourceTable != null, "No resource supplied");
|
||||
if (isPlaceholder(theConceptMap)) {
|
||||
ourLog.info("Not storing TermConceptMap for placeholder {}", theConceptMap.getIdElement().toVersionless().getValueAsString());
|
||||
return;
|
||||
}
|
||||
|
||||
ValidateUtil.isNotBlankOrThrowUnprocessableEntity(theConceptMap.getUrl(), "ConceptMap has no value for ConceptMap.url");
|
||||
ourLog.info("Storing TermConceptMap for {}", theConceptMap.getIdElement().toVersionless().getValueAsString());
|
||||
|
||||
TermConceptMap termConceptMap = new TermConceptMap();
|
||||
termConceptMap.setResource(theResourceTable);
|
||||
termConceptMap.setUrl(theConceptMap.getUrl());
|
||||
termConceptMap.setVersion(theConceptMap.getVersion());
|
||||
|
||||
String source = theConceptMap.hasSourceUriType() ? theConceptMap.getSourceUriType().getValueAsString() : null;
|
||||
String target = theConceptMap.hasTargetUriType() ? theConceptMap.getTargetUriType().getValueAsString() : null;
|
||||
|
||||
/*
|
||||
* If this is a mapping between "resources" instead of purely between
|
||||
* "concepts" (this is a weird concept that is technically possible, at least as of
|
||||
* FHIR R4), don't try to store the mappings.
|
||||
*
|
||||
* See here for a description of what that is:
|
||||
* http://hl7.org/fhir/conceptmap.html#bnr
|
||||
*/
|
||||
if ("StructureDefinition".equals(new IdType(source).getResourceType()) ||
|
||||
"StructureDefinition".equals(new IdType(target).getResourceType())) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (source == null && theConceptMap.hasSourceCanonicalType()) {
|
||||
source = theConceptMap.getSourceCanonicalType().getValueAsString();
|
||||
}
|
||||
if (target == null && theConceptMap.hasTargetCanonicalType()) {
|
||||
target = theConceptMap.getTargetCanonicalType().getValueAsString();
|
||||
}
|
||||
|
||||
/*
|
||||
* For now we always delete old versions. At some point, it would be nice to allow configuration to keep old versions.
|
||||
*/
|
||||
deleteConceptMap(theResourceTable);
|
||||
|
||||
/*
|
||||
* Do the upload.
|
||||
*/
|
||||
String conceptMapUrl = termConceptMap.getUrl();
|
||||
String conceptMapVersion = termConceptMap.getVersion();
|
||||
Optional<TermConceptMap> optionalExistingTermConceptMapByUrl;
|
||||
if (isBlank(conceptMapVersion)) {
|
||||
optionalExistingTermConceptMapByUrl = myConceptMapDao.findTermConceptMapByUrlAndNullVersion(conceptMapUrl);
|
||||
} else {
|
||||
optionalExistingTermConceptMapByUrl = myConceptMapDao.findTermConceptMapByUrlAndVersion(conceptMapUrl, conceptMapVersion);
|
||||
}
|
||||
if (!optionalExistingTermConceptMapByUrl.isPresent()) {
|
||||
try {
|
||||
if (isNotBlank(source)) {
|
||||
termConceptMap.setSource(source);
|
||||
}
|
||||
if (isNotBlank(target)) {
|
||||
termConceptMap.setTarget(target);
|
||||
}
|
||||
} catch (FHIRException fe) {
|
||||
throw new InternalErrorException(fe);
|
||||
}
|
||||
termConceptMap = myConceptMapDao.save(termConceptMap);
|
||||
int codesSaved = 0;
|
||||
|
||||
if (theConceptMap.hasGroup()) {
|
||||
TermConceptMapGroup termConceptMapGroup;
|
||||
for (ConceptMap.ConceptMapGroupComponent group : theConceptMap.getGroup()) {
|
||||
|
||||
String groupSource = group.getSource();
|
||||
if (isBlank(groupSource)) {
|
||||
groupSource = source;
|
||||
}
|
||||
if (isBlank(groupSource)) {
|
||||
throw new UnprocessableEntityException("ConceptMap[url='" + theConceptMap.getUrl() + "'] contains at least one group without a value in ConceptMap.group.source");
|
||||
}
|
||||
|
||||
String groupTarget = group.getTarget();
|
||||
if (isBlank(groupTarget)) {
|
||||
groupTarget = target;
|
||||
}
|
||||
if (isBlank(groupTarget)) {
|
||||
throw new UnprocessableEntityException("ConceptMap[url='" + theConceptMap.getUrl() + "'] contains at least one group without a value in ConceptMap.group.target");
|
||||
}
|
||||
|
||||
termConceptMapGroup = new TermConceptMapGroup();
|
||||
termConceptMapGroup.setConceptMap(termConceptMap);
|
||||
termConceptMapGroup.setSource(groupSource);
|
||||
termConceptMapGroup.setSourceVersion(group.getSourceVersion());
|
||||
termConceptMapGroup.setTarget(groupTarget);
|
||||
termConceptMapGroup.setTargetVersion(group.getTargetVersion());
|
||||
myConceptMapGroupDao.save(termConceptMapGroup);
|
||||
|
||||
if (group.hasElement()) {
|
||||
TermConceptMapGroupElement termConceptMapGroupElement;
|
||||
for (ConceptMap.SourceElementComponent element : group.getElement()) {
|
||||
if (isBlank(element.getCode())) {
|
||||
continue;
|
||||
}
|
||||
termConceptMapGroupElement = new TermConceptMapGroupElement();
|
||||
termConceptMapGroupElement.setConceptMapGroup(termConceptMapGroup);
|
||||
termConceptMapGroupElement.setCode(element.getCode());
|
||||
termConceptMapGroupElement.setDisplay(element.getDisplay());
|
||||
myConceptMapGroupElementDao.save(termConceptMapGroupElement);
|
||||
|
||||
if (element.hasTarget()) {
|
||||
TermConceptMapGroupElementTarget termConceptMapGroupElementTarget;
|
||||
for (ConceptMap.TargetElementComponent elementTarget : element.getTarget()) {
|
||||
if (isBlank(elementTarget.getCode())) {
|
||||
continue;
|
||||
}
|
||||
termConceptMapGroupElementTarget = new TermConceptMapGroupElementTarget();
|
||||
termConceptMapGroupElementTarget.setConceptMapGroupElement(termConceptMapGroupElement);
|
||||
termConceptMapGroupElementTarget.setCode(elementTarget.getCode());
|
||||
termConceptMapGroupElementTarget.setDisplay(elementTarget.getDisplay());
|
||||
termConceptMapGroupElementTarget.setEquivalence(elementTarget.getEquivalence());
|
||||
myConceptMapGroupElementTargetDao.save(termConceptMapGroupElementTarget);
|
||||
|
||||
if (++codesSaved % 250 == 0) {
|
||||
ourLog.info("Have saved {} codes in ConceptMap", codesSaved);
|
||||
myConceptMapGroupElementTargetDao.flush();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapByUrl.get();
|
||||
|
||||
if (isBlank(conceptMapVersion)) {
|
||||
String msg = myContext.getLocalizer().getMessage(
|
||||
BaseTermReadSvcImpl.class,
|
||||
"cannotCreateDuplicateConceptMapUrl",
|
||||
conceptMapUrl,
|
||||
existingTermConceptMap.getResource().getIdDt().toUnqualifiedVersionless().getValue());
|
||||
throw new UnprocessableEntityException(msg);
|
||||
|
||||
} else {
|
||||
String msg = myContext.getLocalizer().getMessage(
|
||||
BaseTermReadSvcImpl.class,
|
||||
"cannotCreateDuplicateConceptMapUrlAndVersion",
|
||||
conceptMapUrl, conceptMapVersion,
|
||||
existingTermConceptMap.getResource().getIdDt().toUnqualifiedVersionless().getValue());
|
||||
throw new UnprocessableEntityException(msg);
|
||||
}
|
||||
}
|
||||
|
||||
ourLog.info("Done storing TermConceptMap[{}] for {}", termConceptMap.getId(), theConceptMap.getIdElement().toVersionless().getValueAsString());
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
public TranslateConceptResults translate(TranslationRequest theTranslationRequest) {
|
||||
TranslateConceptResults retVal = new TranslateConceptResults();
|
||||
|
||||
CriteriaBuilder criteriaBuilder = myEntityManager.getCriteriaBuilder();
|
||||
CriteriaQuery<TermConceptMapGroupElementTarget> query = criteriaBuilder.createQuery(TermConceptMapGroupElementTarget.class);
|
||||
Root<TermConceptMapGroupElementTarget> root = query.from(TermConceptMapGroupElementTarget.class);
|
||||
|
||||
Join<TermConceptMapGroupElementTarget, TermConceptMapGroupElement> elementJoin = root.join("myConceptMapGroupElement");
|
||||
Join<TermConceptMapGroupElement, TermConceptMapGroup> groupJoin = elementJoin.join("myConceptMapGroup");
|
||||
Join<TermConceptMapGroup, TermConceptMap> conceptMapJoin = groupJoin.join("myConceptMap");
|
||||
|
||||
List<TranslationQuery> translationQueries = theTranslationRequest.getTranslationQueries();
|
||||
List<TranslateConceptResult> cachedTargets;
|
||||
ArrayList<Predicate> predicates;
|
||||
Coding coding;
|
||||
|
||||
//-- get the latest ConceptMapVersion if theTranslationRequest has ConceptMap url but no ConceptMap version
|
||||
String latestConceptMapVersion = null;
|
||||
if (theTranslationRequest.hasUrl() && !theTranslationRequest.hasConceptMapVersion())
|
||||
latestConceptMapVersion = getLatestConceptMapVersion(theTranslationRequest);
|
||||
|
||||
for (TranslationQuery translationQuery : translationQueries) {
|
||||
cachedTargets = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION, translationQuery);
|
||||
if (cachedTargets == null) {
|
||||
final List<TranslateConceptResult> targets = new ArrayList<>();
|
||||
|
||||
predicates = new ArrayList<>();
|
||||
|
||||
coding = translationQuery.getCoding();
|
||||
if (coding.hasCode()) {
|
||||
predicates.add(criteriaBuilder.equal(elementJoin.get("myCode"), coding.getCode()));
|
||||
} else {
|
||||
throw new InvalidRequestException("A code must be provided for translation to occur.");
|
||||
}
|
||||
|
||||
if (coding.hasSystem()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), coding.getSystem()));
|
||||
}
|
||||
|
||||
if (coding.hasVersion()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("mySourceVersion"), coding.getVersion()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasTargetSystem()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), translationQuery.getTargetSystem().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasUrl()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl().getValueAsString()));
|
||||
if (translationQuery.hasConceptMapVersion()) {
|
||||
// both url and conceptMapVersion
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion().getValueAsString()));
|
||||
} else {
|
||||
if (StringUtils.isNotBlank(latestConceptMapVersion)) {
|
||||
// only url and use latestConceptMapVersion
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion));
|
||||
} else {
|
||||
predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (translationQuery.hasSource()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getSource().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasTarget()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getTarget().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasResourceId()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), translationQuery.getResourceId()));
|
||||
}
|
||||
|
||||
Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0]));
|
||||
query.where(outerPredicate);
|
||||
|
||||
// Use scrollable results.
|
||||
final TypedQuery<TermConceptMapGroupElementTarget> typedQuery = myEntityManager.createQuery(query.select(root));
|
||||
org.hibernate.query.Query<TermConceptMapGroupElementTarget> hibernateQuery = (org.hibernate.query.Query<TermConceptMapGroupElementTarget>) typedQuery;
|
||||
hibernateQuery.setFetchSize(myFetchSize);
|
||||
ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
|
||||
try (ScrollableResultsIterator<TermConceptMapGroupElementTarget> scrollableResultsIterator = new ScrollableResultsIterator<>(scrollableResults)) {
|
||||
|
||||
Set<TermConceptMapGroupElementTarget> matches = new HashSet<>();
|
||||
while (scrollableResultsIterator.hasNext()) {
|
||||
TermConceptMapGroupElementTarget next = scrollableResultsIterator.next();
|
||||
if (matches.add(next)) {
|
||||
|
||||
TranslateConceptResult translationMatch = new TranslateConceptResult();
|
||||
if (next.getEquivalence() != null) {
|
||||
translationMatch.setEquivalence(next.getEquivalence().toCode());
|
||||
}
|
||||
|
||||
translationMatch.setCode(next.getCode());
|
||||
translationMatch.setSystem(next.getSystem());
|
||||
translationMatch.setSystemVersion(next.getSystemVersion());
|
||||
translationMatch.setDisplay(next.getDisplay());
|
||||
translationMatch.setValueSet(next.getValueSet());
|
||||
translationMatch.setSystemVersion(next.getSystemVersion());
|
||||
translationMatch.setConceptMapUrl(next.getConceptMapUrl());
|
||||
|
||||
targets.add(translationMatch);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
ourLastResultsFromTranslationCache = false; // For testing.
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION, translationQuery, targets);
|
||||
retVal.getResults().addAll(targets);
|
||||
} else {
|
||||
ourLastResultsFromTranslationCache = true; // For testing.
|
||||
retVal.getResults().addAll(cachedTargets);
|
||||
}
|
||||
}
|
||||
|
||||
buildTranslationResult(retVal);
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
public TranslateConceptResults translateWithReverse(TranslationRequest theTranslationRequest) {
|
||||
TranslateConceptResults retVal = new TranslateConceptResults();
|
||||
|
||||
CriteriaBuilder criteriaBuilder = myEntityManager.getCriteriaBuilder();
|
||||
CriteriaQuery<TermConceptMapGroupElement> query = criteriaBuilder.createQuery(TermConceptMapGroupElement.class);
|
||||
Root<TermConceptMapGroupElement> root = query.from(TermConceptMapGroupElement.class);
|
||||
|
||||
Join<TermConceptMapGroupElement, TermConceptMapGroupElementTarget> targetJoin = root.join("myConceptMapGroupElementTargets");
|
||||
Join<TermConceptMapGroupElement, TermConceptMapGroup> groupJoin = root.join("myConceptMapGroup");
|
||||
Join<TermConceptMapGroup, TermConceptMap> conceptMapJoin = groupJoin.join("myConceptMap");
|
||||
|
||||
List<TranslationQuery> translationQueries = theTranslationRequest.getTranslationQueries();
|
||||
List<TranslateConceptResult> cachedElements;
|
||||
ArrayList<Predicate> predicates;
|
||||
Coding coding;
|
||||
|
||||
//-- get the latest ConceptMapVersion if theTranslationRequest has ConceptMap url but no ConceptMap version
|
||||
String latestConceptMapVersion = null;
|
||||
if (theTranslationRequest.hasUrl() && !theTranslationRequest.hasConceptMapVersion())
|
||||
latestConceptMapVersion = getLatestConceptMapVersion(theTranslationRequest);
|
||||
|
||||
for (TranslationQuery translationQuery : translationQueries) {
|
||||
cachedElements = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION_REVERSE, translationQuery);
|
||||
if (cachedElements == null) {
|
||||
final List<TranslateConceptResult> elements = new ArrayList<>();
|
||||
|
||||
predicates = new ArrayList<>();
|
||||
|
||||
coding = translationQuery.getCoding();
|
||||
String targetCode;
|
||||
String targetCodeSystem = null;
|
||||
if (coding.hasCode()) {
|
||||
predicates.add(criteriaBuilder.equal(targetJoin.get("myCode"), coding.getCode()));
|
||||
targetCode = coding.getCode();
|
||||
} else {
|
||||
throw new InvalidRequestException("A code must be provided for translation to occur.");
|
||||
}
|
||||
|
||||
if (coding.hasSystem()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), coding.getSystem()));
|
||||
targetCodeSystem = coding.getSystem();
|
||||
}
|
||||
|
||||
if (coding.hasVersion()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("myTargetVersion"), coding.getVersion()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasUrl()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl().getValueAsString()));
|
||||
if (translationQuery.hasConceptMapVersion()) {
|
||||
// both url and conceptMapVersion
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion().getValueAsString()));
|
||||
} else {
|
||||
if (StringUtils.isNotBlank(latestConceptMapVersion)) {
|
||||
// only url and use latestConceptMapVersion
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion));
|
||||
} else {
|
||||
predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (translationQuery.hasTargetSystem()) {
|
||||
predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), translationQuery.getTargetSystem().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasSource()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getSource().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasTarget()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getTarget().getValueAsString()));
|
||||
}
|
||||
|
||||
if (translationQuery.hasResourceId()) {
|
||||
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), translationQuery.getResourceId()));
|
||||
}
|
||||
|
||||
Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0]));
|
||||
query.where(outerPredicate);
|
||||
|
||||
// Use scrollable results.
|
||||
final TypedQuery<TermConceptMapGroupElement> typedQuery = myEntityManager.createQuery(query.select(root));
|
||||
org.hibernate.query.Query<TermConceptMapGroupElement> hibernateQuery = (org.hibernate.query.Query<TermConceptMapGroupElement>) typedQuery;
|
||||
hibernateQuery.setFetchSize(myFetchSize);
|
||||
ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
|
||||
try (ScrollableResultsIterator<TermConceptMapGroupElement> scrollableResultsIterator = new ScrollableResultsIterator<>(scrollableResults)) {
|
||||
|
||||
Set<TermConceptMapGroupElementTarget> matches = new HashSet<>();
|
||||
while (scrollableResultsIterator.hasNext()) {
|
||||
TermConceptMapGroupElement nextElement = scrollableResultsIterator.next();
|
||||
|
||||
// TODO: The invocation of the size() below does not seem to be necessary but for some reason, removing it causes tests in TerminologySvcImplR4Test to fail.
|
||||
nextElement.getConceptMapGroupElementTargets().size();
|
||||
|
||||
myEntityManager.detach(nextElement);
|
||||
|
||||
if (isNotBlank(targetCode)) {
|
||||
for (TermConceptMapGroupElementTarget next : nextElement.getConceptMapGroupElementTargets()) {
|
||||
if (matches.add(next)) {
|
||||
if (isBlank(targetCodeSystem) || StringUtils.equals(targetCodeSystem, next.getSystem())) {
|
||||
if (StringUtils.equals(targetCode, next.getCode())) {
|
||||
TranslateConceptResult translationMatch = new TranslateConceptResult();
|
||||
translationMatch.setCode(nextElement.getCode());
|
||||
translationMatch.setSystem(nextElement.getSystem());
|
||||
translationMatch.setSystemVersion(nextElement.getSystemVersion());
|
||||
translationMatch.setDisplay(nextElement.getDisplay());
|
||||
translationMatch.setValueSet(nextElement.getValueSet());
|
||||
translationMatch.setSystemVersion(nextElement.getSystemVersion());
|
||||
translationMatch.setConceptMapUrl(nextElement.getConceptMapUrl());
|
||||
if (next.getEquivalence() != null) {
|
||||
translationMatch.setEquivalence(next.getEquivalence().toCode());
|
||||
}
|
||||
|
||||
if (alreadyContainsMapping(elements, translationMatch) || alreadyContainsMapping(retVal.getResults(), translationMatch)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
elements.add(translationMatch);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
ourLastResultsFromTranslationWithReverseCache = false; // For testing.
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION_REVERSE, translationQuery, elements);
|
||||
retVal.getResults().addAll(elements);
|
||||
} else {
|
||||
ourLastResultsFromTranslationWithReverseCache = true; // For testing.
|
||||
retVal.getResults().addAll(cachedElements);
|
||||
}
|
||||
}
|
||||
|
||||
buildTranslationResult(retVal);
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private boolean alreadyContainsMapping(List<TranslateConceptResult> elements, TranslateConceptResult translationMatch) {
|
||||
for (TranslateConceptResult nextExistingElement : elements) {
|
||||
if (nextExistingElement.getSystem().equals(translationMatch.getSystem())) {
|
||||
if (nextExistingElement.getSystemVersion().equals(translationMatch.getSystemVersion())) {
|
||||
if (nextExistingElement.getCode().equals(translationMatch.getCode())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public void deleteConceptMap(ResourceTable theResourceTable) {
|
||||
// Get existing entity so it can be deleted.
|
||||
Optional<TermConceptMap> optionalExistingTermConceptMapById = myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId());
|
||||
|
||||
if (optionalExistingTermConceptMapById.isPresent()) {
|
||||
TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get();
|
||||
|
||||
ourLog.info("Deleting existing TermConceptMap[{}] and its children...", existingTermConceptMap.getId());
|
||||
for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) {
|
||||
|
||||
for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) {
|
||||
|
||||
for (TermConceptMapGroupElementTarget target : element.getConceptMapGroupElementTargets()) {
|
||||
|
||||
myConceptMapGroupElementTargetDao.deleteTermConceptMapGroupElementTargetById(target.getId());
|
||||
}
|
||||
|
||||
myConceptMapGroupElementDao.deleteTermConceptMapGroupElementById(element.getId());
|
||||
}
|
||||
|
||||
myConceptMapGroupDao.deleteTermConceptMapGroupById(group.getId());
|
||||
}
|
||||
|
||||
myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId());
|
||||
ourLog.info("Done deleting existing TermConceptMap[{}] and its children.", existingTermConceptMap.getId());
|
||||
}
|
||||
}
|
||||
|
||||
// Special case for the translate operation with url and without
|
||||
// conceptMapVersion, find the latest conecptMapVersion
|
||||
private String getLatestConceptMapVersion(TranslationRequest theTranslationRequest) {
|
||||
|
||||
Pageable page = PageRequest.of(0, 1);
|
||||
List<TermConceptMap> theConceptMapList = myConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page,
|
||||
theTranslationRequest.getUrl().asStringValue());
|
||||
if (!theConceptMapList.isEmpty()) {
|
||||
return theConceptMapList.get(0).getVersion();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private void buildTranslationResult(TranslateConceptResults theTranslationResult) {
|
||||
|
||||
String msg;
|
||||
if (theTranslationResult.getResults().isEmpty()) {
|
||||
theTranslationResult.setResult(false);
|
||||
msg = myContext.getLocalizer().getMessage(TermConceptMappingSvcImpl.class, "noMatchesFound");
|
||||
theTranslationResult.setMessage(msg);
|
||||
} else {
|
||||
theTranslationResult.setResult(true);
|
||||
msg = myContext.getLocalizer().getMessage(TermConceptMappingSvcImpl.class, "matchesFound");
|
||||
theTranslationResult.setMessage(msg);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This method is present only for unit tests, do not call from client code
|
||||
*/
|
||||
@VisibleForTesting
|
||||
public static void clearOurLastResultsFromTranslationCache() {
|
||||
ourLastResultsFromTranslationCache = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is present only for unit tests, do not call from client code
|
||||
*/
|
||||
@VisibleForTesting
|
||||
public static void clearOurLastResultsFromTranslationWithReverseCache() {
|
||||
ourLastResultsFromTranslationWithReverseCache = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is present only for unit tests, do not call from client code
|
||||
*/
|
||||
@VisibleForTesting
|
||||
static boolean isOurLastResultsFromTranslationCache() {
|
||||
return ourLastResultsFromTranslationCache;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is present only for unit tests, do not call from client code
|
||||
*/
|
||||
@VisibleForTesting
|
||||
static boolean isOurLastResultsFromTranslationWithReverseCache() {
|
||||
return ourLastResultsFromTranslationWithReverseCache;
|
||||
}
|
||||
|
||||
public static Parameters toParameters(TranslateConceptResults theTranslationResult) {
|
||||
Parameters retVal = new Parameters();
|
||||
|
||||
retVal.addParameter().setName("result").setValue(new BooleanType(theTranslationResult.getResult()));
|
||||
|
||||
if (theTranslationResult.getMessage() != null) {
|
||||
retVal.addParameter().setName("message").setValue(new StringType(theTranslationResult.getMessage()));
|
||||
}
|
||||
|
||||
for (TranslateConceptResult translationMatch : theTranslationResult.getResults()) {
|
||||
Parameters.ParametersParameterComponent matchParam = retVal.addParameter().setName("match");
|
||||
populateTranslateMatchParts(translationMatch, matchParam);
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private static void populateTranslateMatchParts(TranslateConceptResult theTranslationMatch, Parameters.ParametersParameterComponent theParam) {
|
||||
if (theTranslationMatch.getEquivalence() != null) {
|
||||
theParam.addPart().setName("equivalence").setValue(new CodeType(theTranslationMatch.getEquivalence()));
|
||||
}
|
||||
|
||||
if (isNotBlank(theTranslationMatch.getSystem()) || isNotBlank(theTranslationMatch.getCode()) || isNotBlank(theTranslationMatch.getDisplay())) {
|
||||
Coding value = new Coding(theTranslationMatch.getSystem(), theTranslationMatch.getCode(), theTranslationMatch.getDisplay());
|
||||
|
||||
if (isNotBlank(theTranslationMatch.getSystemVersion())) {
|
||||
value.setVersion(theTranslationMatch.getSystemVersion());
|
||||
}
|
||||
|
||||
theParam.addPart().setName("concept").setValue(value);
|
||||
}
|
||||
|
||||
if (isNotBlank(theTranslationMatch.getConceptMapUrl())) {
|
||||
theParam.addPart().setName("source").setValue(new UriType(theTranslationMatch.getConceptMapUrl()));
|
||||
}
|
||||
}
|
||||
}
|
|
@ -22,6 +22,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
|
|||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
import static org.hl7.fhir.convertors.conv30_40.ValueSet30_40.convertValueSet;
|
||||
|
@ -61,7 +62,7 @@ public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidation
|
|||
|
||||
|
||||
@Override
|
||||
public ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, IBaseResource theValueSetToExpand) {
|
||||
public ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) {
|
||||
try {
|
||||
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
|
||||
valueSetToExpandR4 = toCanonicalValueSet(theValueSetToExpand);
|
||||
|
|
|
@ -18,6 +18,7 @@ import org.hl7.fhir.r4.model.ValueSet;
|
|||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.transaction.Transactional;
|
||||
|
||||
/*
|
||||
|
@ -59,7 +60,7 @@ public class TermReadSvcR4 extends BaseTermReadSvcImpl implements ITermReadSvcR4
|
|||
|
||||
@Transactional(dontRollbackOn = {ExpansionTooCostlyException.class})
|
||||
@Override
|
||||
public IValidationSupport.ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, IBaseResource theValueSetToExpand) {
|
||||
public IValidationSupport.ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) {
|
||||
ValueSet expanded = super.expandValueSet(theExpansionOptions, (ValueSet) theValueSetToExpand);
|
||||
return new IValidationSupport.ValueSetExpansionOutcome(expanded);
|
||||
}
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.hl7.fhir.utilities.validation.ValidationOptions;
|
|||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import javax.transaction.Transactional;
|
||||
|
||||
|
@ -56,7 +57,7 @@ public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSup
|
|||
|
||||
@Override
|
||||
@Transactional(dontRollbackOn = {ExpansionTooCostlyException.class})
|
||||
public ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, IBaseResource theValueSetToExpand) {
|
||||
public ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) {
|
||||
ValueSet valueSetToExpand = (ValueSet) theValueSetToExpand;
|
||||
org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSet(theExpansionOptions, org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(valueSetToExpand));
|
||||
return new ValueSetExpansionOutcome(org.hl7.fhir.convertors.conv40_50.ValueSet40_50.convertValueSet(expandedR4));
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
package ca.uhn.fhir.jpa.term.api;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
|
||||
public interface ITermConceptMappingSvc extends IValidationSupport {
|
||||
|
||||
|
||||
TranslateConceptResults translate(TranslationRequest theTranslationRequest);
|
||||
|
||||
TranslateConceptResults translateWithReverse(TranslationRequest theTranslationRequest);
|
||||
|
||||
void deleteConceptMapAndChildren(ResourceTable theResourceTable);
|
||||
|
||||
void storeTermConceptMapAndChildren(ResourceTable theResourceTable, ConceptMap theConceptMap);
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -91,18 +91,10 @@ public interface ITermReadSvc extends IValidationSupport {
|
|||
|
||||
CodeSystem fetchCanonicalCodeSystemFromCompleteContext(String theSystem);
|
||||
|
||||
void deleteConceptMapAndChildren(ResourceTable theResourceTable);
|
||||
|
||||
void deleteValueSetAndChildren(ResourceTable theResourceTable);
|
||||
|
||||
void storeTermConceptMapAndChildren(ResourceTable theResourceTable, ConceptMap theConceptMap);
|
||||
|
||||
void storeTermValueSet(ResourceTable theResourceTable, ValueSet theValueSet);
|
||||
|
||||
List<TermConceptMapGroupElementTarget> translate(TranslationRequest theTranslationRequest);
|
||||
|
||||
List<TermConceptMapGroupElement> translateWithReverse(TranslationRequest theTranslationRequest);
|
||||
|
||||
IFhirResourceDaoCodeSystem.SubsumesResult subsumes(IPrimitiveType<String> theCodeA, IPrimitiveType<String> theCodeB, IPrimitiveType<String> theSystem, IBaseCoding theCodingA, IBaseCoding theCodingB);
|
||||
|
||||
void preExpandDeferredValueSetsToTerminologyTables();
|
||||
|
|
|
@ -20,8 +20,10 @@ package ca.uhn.fhir.jpa.util;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import com.github.benmanes.caffeine.cache.Cache;
|
||||
import com.github.benmanes.caffeine.cache.Caffeine;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
import java.util.EnumMap;
|
||||
|
@ -39,6 +41,9 @@ import java.util.function.Function;
|
|||
*/
|
||||
public class MemoryCacheService {
|
||||
|
||||
@Autowired
|
||||
private DaoConfig myDaoConfig;
|
||||
|
||||
private EnumMap<CacheEnum, Cache<?, ?>> myCaches;
|
||||
|
||||
@PostConstruct
|
||||
|
@ -47,7 +52,23 @@ public class MemoryCacheService {
|
|||
myCaches = new EnumMap<>(CacheEnum.class);
|
||||
|
||||
for (CacheEnum next : CacheEnum.values()) {
|
||||
Cache<Object, Object> nextCache = Caffeine.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).maximumSize(10000).build();
|
||||
|
||||
long timeoutSeconds;
|
||||
switch (next) {
|
||||
case CONCEPT_TRANSLATION:
|
||||
case CONCEPT_TRANSLATION_REVERSE:
|
||||
timeoutSeconds = myDaoConfig.getTranslationCachesExpireAfterWriteInMinutes() * 1000;
|
||||
break;
|
||||
case TAG_DEFINITION:
|
||||
case PERSISTENT_ID:
|
||||
case RESOURCE_LOOKUP:
|
||||
case FORCED_ID:
|
||||
default:
|
||||
timeoutSeconds = 60;
|
||||
break;
|
||||
}
|
||||
|
||||
Cache<Object, Object> nextCache = Caffeine.newBuilder().expireAfterWrite(timeoutSeconds, TimeUnit.MINUTES).maximumSize(10000).build();
|
||||
myCaches.put(next, nextCache);
|
||||
}
|
||||
|
||||
|
@ -85,6 +106,8 @@ public class MemoryCacheService {
|
|||
PERSISTENT_ID,
|
||||
RESOURCE_LOOKUP,
|
||||
FORCED_ID,
|
||||
CONCEPT_TRANSLATION,
|
||||
CONCEPT_TRANSLATION_REVERSE
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.validation;
|
|||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.jpa.packages.NpmJpaValidationSupport;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService;
|
||||
import org.hl7.fhir.common.hapi.validation.support.SnapshotGeneratingValidationSupport;
|
||||
|
@ -49,6 +50,8 @@ public class JpaValidationSupportChain extends ValidationSupportChain {
|
|||
private ITermReadSvc myTerminologyService;
|
||||
@Autowired
|
||||
private NpmJpaValidationSupport myNpmJpaValidationSupport;
|
||||
@Autowired
|
||||
private ITermConceptMappingSvc myConceptMappingSvc;
|
||||
|
||||
public JpaValidationSupportChain(FhirContext theFhirContext) {
|
||||
myFhirContext = theFhirContext;
|
||||
|
@ -74,6 +77,7 @@ public class JpaValidationSupportChain extends ValidationSupportChain {
|
|||
addValidationSupport(new InMemoryTerminologyServerValidationSupport(myFhirContext));
|
||||
addValidationSupport(myNpmJpaValidationSupport);
|
||||
addValidationSupport(new CommonCodeSystemsTerminologyService(myFhirContext));
|
||||
addValidationSupport(myConceptMappingSvc);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -19,27 +19,28 @@ import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
|
|||
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
|
||||
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
|
||||
import ca.uhn.fhir.jpa.entity.MdmLink;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
|
||||
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
|
||||
import ca.uhn.fhir.parser.IParser;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.MethodOutcome;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.util.HapiExtensions;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Binary;
|
||||
import org.hl7.fhir.r4.model.CareTeam;
|
||||
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.Extension;
|
||||
import org.hl7.fhir.r4.model.Group;
|
||||
import org.hl7.fhir.r4.model.Immunization;
|
||||
import org.hl7.fhir.r4.model.InstantType;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Parameters;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
@ -55,7 +56,6 @@ import org.springframework.batch.core.explore.JobExplorer;
|
|||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Date;
|
||||
|
@ -639,6 +639,61 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
assertThat(nextContents, is(containsString("IMM6")));
|
||||
assertThat(nextContents, is(containsString("IMM8")));
|
||||
}
|
||||
@Test
|
||||
public void testGroupBatchJobMdmExpansionIdentifiesGoldenResources() throws Exception {
|
||||
createResources();
|
||||
|
||||
// Create a bulk job
|
||||
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
|
||||
bulkDataExportOptions.setOutputFormat(null);
|
||||
bulkDataExportOptions.setResourceTypes(Sets.newHashSet("Immunization", "Patient"));
|
||||
bulkDataExportOptions.setSince(null);
|
||||
bulkDataExportOptions.setFilters(null);
|
||||
bulkDataExportOptions.setGroupId(myPatientGroupId);
|
||||
bulkDataExportOptions.setExpandMdm(true);
|
||||
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
|
||||
|
||||
myBulkDataExportSvc.buildExportFiles();
|
||||
awaitAllBulkJobCompletions();
|
||||
|
||||
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
|
||||
|
||||
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
|
||||
assertThat(jobInfo.getFiles().size(), equalTo(2));
|
||||
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
|
||||
|
||||
//Ensure that all immunizations refer to the golden resource via extension
|
||||
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
|
||||
List<Immunization> immunizations = readBulkExportContentsIntoResources(getBinaryContents(jobInfo, 0), Immunization.class);
|
||||
immunizations
|
||||
.stream().filter(immu -> !immu.getIdElement().getIdPart().equals("PAT999"))//Skip the golden resource
|
||||
.forEach(immunization -> {
|
||||
Extension extensionByUrl = immunization.getExtensionByUrl(HapiExtensions.ASSOCIATED_GOLDEN_RESOURCE_EXTENSION_URL);
|
||||
String reference = ((Reference) extensionByUrl.getValue()).getReference();
|
||||
assertThat(reference, is(equalTo("Patient/PAT999")));
|
||||
});
|
||||
|
||||
//Ensure all patients are linked to their golden resource.
|
||||
assertThat(jobInfo.getFiles().get(1).getResourceType(), is(equalTo("Patient")));
|
||||
List<Patient> patients = readBulkExportContentsIntoResources(getBinaryContents(jobInfo, 1), Patient.class);
|
||||
patients.stream()
|
||||
.filter(patient -> patient.getIdElement().getIdPart().equals("PAT999"))
|
||||
.forEach(patient -> {
|
||||
Extension extensionByUrl = patient.getExtensionByUrl(HapiExtensions.ASSOCIATED_GOLDEN_RESOURCE_EXTENSION_URL);
|
||||
String reference = ((Reference) extensionByUrl.getValue()).getReference();
|
||||
assertThat(reference, is(equalTo("Patient/PAT999")));
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
private <T extends IBaseResource> List<T> readBulkExportContentsIntoResources(String theContents, Class<T> theClass) {
|
||||
IParser iParser = myFhirCtx.newJsonParser();
|
||||
return Arrays.stream(theContents.split("\n"))
|
||||
.map(iParser::parseResource)
|
||||
.map(theClass::cast)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPatientLevelExportWorks() throws JobParametersInvalidException {
|
||||
|
@ -1013,7 +1068,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
|
||||
//Non-cached should all have unique IDs
|
||||
List<String> jobIds = Stream.of(jobInfo5, jobInfo6, jobInfo7, jobInfo8, jobInfo9).map(IBulkDataExportSvc.JobInfo::getJobId).collect(Collectors.toList());
|
||||
ourLog.info("ZOOP {}", String.join(", ", jobIds));
|
||||
Set<String> uniqueJobIds = new HashSet<>(jobIds);
|
||||
assertEquals(uniqueJobIds.size(), jobIds.size());
|
||||
|
||||
|
|
|
@ -42,6 +42,7 @@ import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
|
|||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
|
||||
import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
|
@ -363,8 +364,8 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
|
|||
public void afterClearTerminologyCaches() {
|
||||
BaseTermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc);
|
||||
baseHapiTerminologySvc.clearCaches();
|
||||
BaseTermReadSvcImpl.clearOurLastResultsFromTranslationCache();
|
||||
BaseTermReadSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
|
||||
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache();
|
||||
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
|
||||
TermDeferredStorageSvcImpl deferredSvc = AopTestUtils.getTargetObject(myTerminologyDeferredStorageSvc);
|
||||
deferredSvc.clearDeferred();
|
||||
}
|
||||
|
|
|
@ -1,17 +1,12 @@
|
|||
package ca.uhn.fhir.jpa.dao.dstu3;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResult;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMap;
|
||||
import org.hl7.fhir.dstu3.model.ConceptMap;
|
||||
import org.hl7.fhir.dstu3.model.Enumerations.PublicationStatus;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.UriType;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
|
@ -23,10 +18,13 @@ import org.springframework.transaction.annotation.Transactional;
|
|||
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationMatch;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationResult;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMap;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3ConceptMapTest.class);
|
||||
|
@ -52,9 +50,9 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
|
|||
.setCode("12345");
|
||||
translationRequest.setTargetSystem(new UriType(CS_URL_3));
|
||||
|
||||
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
|
||||
TranslateConceptResults translationResult = myConceptMapDao.translate(translationRequest, null);
|
||||
|
||||
assertFalse(translationResult.getResult().booleanValue());
|
||||
assertFalse(translationResult.getResult());
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -76,32 +74,28 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
|
|||
.setCode("12345");
|
||||
translationRequest.setTargetSystem(new UriType(CS_URL_3));
|
||||
|
||||
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
|
||||
TranslateConceptResults translationResult = myConceptMapDao.translate(translationRequest, null);
|
||||
|
||||
assertTrue(translationResult.getResult().booleanValue());
|
||||
assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
|
||||
assertTrue(translationResult.getResult());
|
||||
assertEquals("Matches found", translationResult.getMessage());
|
||||
|
||||
assertEquals(2, translationResult.getMatches().size());
|
||||
assertEquals(2, translationResult.getResults().size());
|
||||
|
||||
TranslationMatch translationMatch = translationResult.getMatches().get(0);
|
||||
assertEquals(Enumerations.ConceptMapEquivalence.EQUAL.toCode(), translationMatch.getEquivalence().getCode());
|
||||
Coding concept = translationMatch.getConcept();
|
||||
assertEquals("56789", concept.getCode());
|
||||
assertEquals("Target Code 56789", concept.getDisplay());
|
||||
assertEquals(CS_URL_3, concept.getSystem());
|
||||
assertEquals("Version 4", concept.getVersion());
|
||||
assertFalse(concept.getUserSelected());
|
||||
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
|
||||
TranslateConceptResult translationMatch = translationResult.getResults().get(0);
|
||||
assertEquals(Enumerations.ConceptMapEquivalence.EQUAL.toCode(), translationMatch.getEquivalence());
|
||||
assertEquals("56789", translationMatch.getCode());
|
||||
assertEquals("Target Code 56789", translationMatch.getDisplay());
|
||||
assertEquals(CS_URL_3, translationMatch.getSystem());
|
||||
assertEquals("Version 4", translationMatch.getSystemVersion());
|
||||
assertEquals(CM_URL, translationMatch.getConceptMapUrl());
|
||||
|
||||
translationMatch = translationResult.getMatches().get(1);
|
||||
assertEquals(Enumerations.ConceptMapEquivalence.WIDER.toCode(), translationMatch.getEquivalence().getCode());
|
||||
concept = translationMatch.getConcept();
|
||||
assertEquals("67890", concept.getCode());
|
||||
assertEquals("Target Code 67890", concept.getDisplay());
|
||||
assertEquals(CS_URL_3, concept.getSystem());
|
||||
assertEquals("Version 4", concept.getVersion());
|
||||
assertFalse(concept.getUserSelected());
|
||||
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
|
||||
translationMatch = translationResult.getResults().get(1);
|
||||
assertEquals(Enumerations.ConceptMapEquivalence.WIDER.toCode(), translationMatch.getEquivalence());
|
||||
assertEquals("67890", translationMatch.getCode());
|
||||
assertEquals("Target Code 67890", translationMatch.getDisplay());
|
||||
assertEquals(CS_URL_3, translationMatch.getSystem());
|
||||
assertEquals("Version 4", translationMatch.getSystemVersion());
|
||||
assertEquals(CM_URL, translationMatch.getConceptMapUrl());
|
||||
// </editor-fold>
|
||||
}
|
||||
});
|
||||
|
@ -109,34 +103,34 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
|
|||
|
||||
@Test
|
||||
public void testConceptMapFindTermConceptMapByUrl() {
|
||||
|
||||
|
||||
Pageable page = PageRequest.of(0, 1);
|
||||
List<TermConceptMap> theExpConceptMapList = myTermConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page, CM_URL);
|
||||
assertEquals(1, theExpConceptMapList.size());
|
||||
assertEquals(CM_URL, theExpConceptMapList.get(0).getUrl());
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testConceptMapTwoConceptMapWithSameUrlDifferentVersion() {
|
||||
|
||||
|
||||
String theUrl = "http://loinc.org/property/analyte-suffix";
|
||||
ConceptMap theConceptMap1 = new ConceptMap();
|
||||
ConceptMap theConceptMap2 = new ConceptMap();
|
||||
|
||||
|
||||
theConceptMap1.setUrl(theUrl).setStatus(PublicationStatus.ACTIVE).setName("name1").setVersion("v1");
|
||||
theConceptMap2.setUrl(theUrl).setStatus(PublicationStatus.ACTIVE).setName("name2").setVersion("v2");
|
||||
|
||||
|
||||
myConceptMapDao.create(theConceptMap1);
|
||||
myConceptMapDao.create(theConceptMap2);
|
||||
|
||||
|
||||
Optional<TermConceptMap> theExpConceptMapV1 = myTermConceptMapDao.findTermConceptMapByUrlAndVersion(theUrl, "v1");
|
||||
Optional<TermConceptMap> theExpConceptMapV2 = myTermConceptMapDao.findTermConceptMapByUrlAndVersion(theUrl, "v2");
|
||||
|
||||
|
||||
assertTrue(theExpConceptMapV1.isPresent());
|
||||
assertEquals(theUrl, theExpConceptMapV1.get().getUrl());
|
||||
assertEquals("v1", theExpConceptMapV1.get().getVersion());
|
||||
|
||||
|
||||
assertTrue(theExpConceptMapV2.isPresent());
|
||||
assertEquals(theUrl, theExpConceptMapV2.get().getUrl());
|
||||
assertEquals("v2", theExpConceptMapV2.get().getVersion());
|
||||
|
@ -144,7 +138,7 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
|
|||
// should return the latest one which is v2
|
||||
Pageable page = PageRequest.of(0, 1);
|
||||
List<TermConceptMap> theExpSecondOne = myTermConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page, theUrl);
|
||||
|
||||
|
||||
assertEquals(1, theExpSecondOne.size());
|
||||
assertEquals(theUrl, theExpSecondOne.get(0).getUrl());
|
||||
assertEquals("v2", theExpSecondOne.get(0).getVersion());
|
||||
|
@ -152,29 +146,29 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
|
|||
|
||||
@Test
|
||||
public void testConceptMapTwoConceptMapWithSameUrlOneWithoutVersion() {
|
||||
|
||||
|
||||
String theUrl = "http://loinc.org/property/analyte-suffix";
|
||||
ConceptMap theConceptMap1 = new ConceptMap();
|
||||
ConceptMap theConceptMap2 = new ConceptMap();
|
||||
|
||||
|
||||
theConceptMap1.setUrl(theUrl).setStatus(PublicationStatus.ACTIVE).setName("name1").setVersion("v1");
|
||||
theConceptMap2.setUrl(theUrl).setStatus(PublicationStatus.ACTIVE).setName("name2");
|
||||
|
||||
|
||||
myConceptMapDao.create(theConceptMap1);
|
||||
myConceptMapDao.create(theConceptMap2);
|
||||
|
||||
|
||||
Optional<TermConceptMap> theExpConceptMapV1 = myTermConceptMapDao.findTermConceptMapByUrlAndVersion(theUrl, "v1");
|
||||
|
||||
|
||||
assertTrue(theExpConceptMapV1.isPresent());
|
||||
assertEquals(theUrl, theExpConceptMapV1.get().getUrl());
|
||||
assertEquals("v1", theExpConceptMapV1.get().getVersion());
|
||||
|
||||
|
||||
// should return the latest one which in this case is not versioned
|
||||
Pageable page = PageRequest.of(0, 1);
|
||||
List<TermConceptMap> theExpSecondOne = myTermConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page, theUrl);
|
||||
|
||||
|
||||
assertEquals(1, theExpSecondOne.size());
|
||||
assertEquals(theUrl, theExpSecondOne.get(0).getUrl());
|
||||
assertNull(theExpSecondOne.get(0).getVersion());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -74,9 +74,11 @@ import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
|
|||
import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
|
||||
import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
|
@ -198,6 +200,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
|
|||
private static IValidationSupport ourJpaValidationSupportChainR4;
|
||||
private static IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> ourValueSetDao;
|
||||
|
||||
@Autowired
|
||||
protected ITermConceptMappingSvc myConceptMappingSvc;
|
||||
@Autowired
|
||||
protected IPartitionLookupSvc myPartitionConfigSvc;
|
||||
@Autowired
|
||||
|
@ -514,8 +518,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
|
|||
public void afterClearTerminologyCaches() {
|
||||
BaseTermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc);
|
||||
baseHapiTerminologySvc.clearCaches();
|
||||
BaseTermReadSvcImpl.clearOurLastResultsFromTranslationCache();
|
||||
BaseTermReadSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
|
||||
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache();
|
||||
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
|
||||
TermDeferredStorageSvcImpl termDeferredStorageSvc = AopTestUtils.getTargetObject(myTerminologyDeferredStorageSvc);
|
||||
termDeferredStorageSvc.clearDeferred();
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -276,6 +276,8 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
@Disabled
|
||||
public void testCreateInvalidParamInvalidResourceName() {
|
||||
|
|
|
@ -59,6 +59,7 @@ import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
|
|||
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
|
||||
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
|
||||
import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
|
@ -437,8 +438,8 @@ public abstract class BaseJpaR5Test extends BaseJpaTest {
|
|||
public void afterClearTerminologyCaches() {
|
||||
BaseTermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc);
|
||||
baseHapiTerminologySvc.clearCaches();
|
||||
BaseTermReadSvcImpl.clearOurLastResultsFromTranslationCache();
|
||||
BaseTermReadSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
|
||||
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache();
|
||||
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
|
||||
TermDeferredStorageSvcImpl deferredStorageSvc = AopTestUtils.getTargetObject(myTermDeferredStorageSvc);
|
||||
deferredStorageSvc.clearDeferred();
|
||||
}
|
||||
|
|
|
@ -2,14 +2,17 @@ package ca.uhn.fhir.jpa.interceptor;
|
|||
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
|
||||
import com.google.common.base.Charsets;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.CarePlan;
|
||||
import org.hl7.fhir.r4.model.Condition;
|
||||
|
@ -21,14 +24,21 @@ import org.hl7.fhir.r4.model.Patient;
|
|||
import org.hl7.fhir.r4.model.Reference;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class CascadingDeleteInterceptorTest extends BaseResourceProviderR4Test {
|
||||
|
||||
|
@ -88,6 +98,42 @@ public class CascadingDeleteInterceptorTest extends BaseResourceProviderR4Test {
|
|||
myConditionId = myClient.create().resource(condition).execute().getId().toUnqualifiedVersionless();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteWithInterceptorVerifyTheRequestGetsPassedToDao() throws IOException {
|
||||
// The whole and ONLY point of this Cascade Delete Unit Test is to make sure that a non-NULL RequestDetails param
|
||||
// is passed to the dao.read() method from inside the CascadingDeleteInterceptor.handleDeleteConflicts() method
|
||||
// For details see: https://gitlab.com/simpatico.ai/cdr/-/issues/1643
|
||||
DaoRegistry mockDaoRegistry = mock(DaoRegistry.class);
|
||||
IFhirResourceDao mockResourceDao = mock (IFhirResourceDao.class);
|
||||
IBaseResource mockResource = mock(IBaseResource.class);
|
||||
CascadingDeleteInterceptor aDeleteInterceptor = new CascadingDeleteInterceptor(myFhirCtx, mockDaoRegistry, myInterceptorBroadcaster);
|
||||
ourRestServer.getInterceptorService().unregisterInterceptor(myDeleteInterceptor);
|
||||
ourRestServer.getInterceptorService().registerInterceptor(aDeleteInterceptor);
|
||||
when(mockDaoRegistry.getResourceDao(any(String.class))).thenReturn(mockResourceDao);
|
||||
when(mockResourceDao.read(any(IIdType.class), any(RequestDetails.class))).thenReturn(mockResource);
|
||||
ArgumentCaptor<RequestDetails> theRequestDetailsCaptor = ArgumentCaptor.forClass(RequestDetails.class);
|
||||
|
||||
Patient p = new Patient();
|
||||
p.setActive(true);
|
||||
myPatientId = myClient.create().resource(p).execute().getId().toUnqualifiedVersionless();
|
||||
Encounter e = new Encounter();
|
||||
e.setSubject(new Reference(myPatientId));
|
||||
myEncounterId = myClient.create().resource(e).execute().getId().toUnqualifiedVersionless();
|
||||
|
||||
HttpDelete delete = new HttpDelete(ourServerBase + "/" + myPatientId.getValue() + "?" + Constants.PARAMETER_CASCADE_DELETE + "=" + Constants.CASCADE_DELETE + "&_pretty=true");
|
||||
delete.addHeader(Constants.HEADER_ACCEPT, Constants.CT_FHIR_JSON_NEW);
|
||||
try (CloseableHttpResponse response = ourHttpClient.execute(delete)) {
|
||||
String deleteResponse = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
|
||||
ourLog.info("Response: {}", deleteResponse);
|
||||
}
|
||||
|
||||
verify(mockResourceDao).read(any(IIdType.class), theRequestDetailsCaptor.capture());
|
||||
List<RequestDetails> capturedRequestDetailsParam = theRequestDetailsCaptor.getAllValues();
|
||||
for (RequestDetails requestDetails : capturedRequestDetailsParam) {
|
||||
assertNotNull(requestDetails);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteWithNoInterceptorAndConstraints() {
|
||||
createResources();
|
||||
|
@ -252,7 +298,4 @@ public class CascadingDeleteInterceptorTest extends BaseResourceProviderR4Test {
|
|||
// good
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,133 @@
package ca.uhn.fhir.jpa.interceptor;

import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Observation;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;

import javax.annotation.Nonnull;
import java.util.List;
import java.util.stream.Collectors;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceProviderR4Test {

private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseTerminologyTranslationInterceptorTest.class);

@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private IInterceptorService myInterceptorBroadcaster;
@Autowired
private ResponseTerminologyTranslationInterceptor myResponseTerminologyTranslationInterceptor;

@BeforeEach
public void beforeEach() {
myConceptMapDao.create(createConceptMap());
ourRestServer.registerInterceptor(myResponseTerminologyTranslationInterceptor);
}

@AfterEach
public void afterEach() {
myResponseTerminologyTranslationInterceptor.clearMappingSpecifications();
ourRestServer.unregisterInterceptor(myResponseTerminologyTranslationInterceptor);
}

@Test
public void testMapConcept_MappingFound() {
myResponseTerminologyTranslationInterceptor.addMappingSpecification(CS_URL, CS_URL_2);

Observation observation = new Observation();
observation.setStatus(Observation.ObservationStatus.AMENDED);
observation.getCode()
.addCoding(new Coding(CS_URL, "12345", null));
IIdType id = myObservationDao.create(observation).getId();

// Read it back
observation = myClient.read().resource(Observation.class).withId(id).execute();

assertThat(toCodeStrings(observation).toString(), toCodeStrings(observation), Matchers.contains(
"[system=http://example.com/my_code_system, code=12345, display=null]",
"[system=http://example.com/my_code_system2, code=34567, display=Target Code 34567]"
));
}

@Test
public void testMapConcept_MultipleMappingsFound() {
myResponseTerminologyTranslationInterceptor.addMappingSpecification(CS_URL, CS_URL_3);

Observation observation = new Observation();
observation.setStatus(Observation.ObservationStatus.AMENDED);
observation.getCode()
.addCoding(new Coding(CS_URL, "12345", null));
IIdType id = myObservationDao.create(observation).getId();

// Read it back
observation = myClient.read().resource(Observation.class).withId(id).execute();

assertThat(toCodeStrings(observation).toString(), toCodeStrings(observation), Matchers.contains(
"[system=http://example.com/my_code_system, code=12345, display=null]",
"[system=http://example.com/my_code_system3, code=56789, display=Target Code 56789]",
"[system=http://example.com/my_code_system3, code=67890, display=Target Code 67890]"
));
}

/**
* Don't map if we already have a code in the desired target
*/
@Test
public void testMapConcept_MappingNotNeeded() {
myResponseTerminologyTranslationInterceptor.addMappingSpecification(CS_URL, CS_URL_2);

Observation observation = new Observation();
observation.setStatus(Observation.ObservationStatus.AMENDED);
observation.getCode()
.addCoding(new Coding(CS_URL, "12345", null))
.addCoding(new Coding(CS_URL_2, "9999", "Display 9999"));
IIdType id = myObservationDao.create(observation).getId();

// Read it back
observation = myClient.read().resource(Observation.class).withId(id).execute();

assertThat(toCodeStrings(observation).toString(), toCodeStrings(observation), Matchers.contains(
"[system=http://example.com/my_code_system, code=12345, display=null]",
"[system=http://example.com/my_code_system2, code=9999, display=Display 9999]"
));
}

@Test
public void testMapConcept_NoMappingExists() {
myResponseTerminologyTranslationInterceptor.addMappingSpecification(CS_URL, CS_URL_2);

Observation observation = new Observation();
observation.setStatus(Observation.ObservationStatus.AMENDED);
observation.getCode()
.addCoding(new Coding(CS_URL, "FOO", null));
IIdType id = myObservationDao.create(observation).getId();

// Read it back
observation = myClient.read().resource(Observation.class).withId(id).execute();

assertThat(toCodeStrings(observation).toString(), toCodeStrings(observation), Matchers.contains(
"[system=http://example.com/my_code_system, code=FOO, display=null]"
));
}

@Nonnull
private List<String> toCodeStrings(Observation observation) {
return observation.getCode().getCoding().stream().map(t -> "[system=" + t.getSystem() + ", code=" + t.getCode() + ", display=" + t.getDisplay() + "]").collect(Collectors.toList());
}

}

@@ -95,7 +95,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -156,7 +156,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -205,7 +205,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -255,7 +255,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -304,7 +304,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -352,7 +352,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -400,7 +400,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -446,7 +446,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertFalse(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("No matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("No Matches found", ((StringType) param.getValue()).getValueAsString());

}

@@ -480,7 +480,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -525,7 +525,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -570,7 +570,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -615,7 +615,7 @@ public class ResourceProviderDstu3ConceptMapTest extends BaseResourceProviderDst
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -567,11 +567,6 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProvid
assertEquals(1, response.getEntry().size());
assertNull(response.getTotalElement().getValue());

// Load next page
response = myClient.loadPage().next(response).execute();
assertEquals(1, response.getEntry().size());
assertNull(response.getTotalElement().getValue());

StopWatch sw = new StopWatch();
while(true) {
SearchStatusEnum status = runInTransaction(() -> {

@@ -586,6 +581,11 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProvid
}
}

// Load next page
response = myClient.loadPage().next(response).execute();
assertEquals(1, response.getEntry().size());
assertNull(response.getTotalElement().getValue());

runInTransaction(() -> {
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(searchId).orElseThrow(() -> new IllegalStateException());
assertEquals(3, search.getNumFound());

@@ -594,7 +594,12 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProvid
});

// The paging should have ended now - but the last redacted female result is an empty existing page which should never have been there.
assertNull(BundleUtil.getLinkUrlOfType(myFhirCtx, response, "next"));
String next = BundleUtil.getLinkUrlOfType(myFhirCtx, response, "next");
if (next != null) {
response = myClient.loadPage().next(response).execute();
fail(myFhirCtx.newJsonParser().encodeResourceToString(response));
}

}

/**

@@ -58,8 +58,6 @@ import java.util.stream.Collectors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;

@@ -423,6 +421,58 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide
}

@SuppressWarnings("unused")
@Test
public void testSearchWithCustomParamInvalidDateFormat() {

SearchParameter dateParameter = new SearchParameter();
dateParameter.setId("explanationofbenefit-service-date");
dateParameter.setName("ExplanationOfBenefit_ServiceDate");
dateParameter.setCode("service-date");
dateParameter.setDescription("test");
dateParameter.setUrl("http://integer");
dateParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
dateParameter.addBase("ExplanationOfBenefit");
dateParameter.setType(Enumerations.SearchParamType.DATE);
dateParameter.setExpression("ExplanationOfBenefit.billablePeriod | ExplanationOfBenefit.item.serviced as Date | ExplanationOfBenefit.item.serviced as Period");
dateParameter.setXpath("f:ExplanationOfBenefit/f:billablePeriod | f:ExplanationOfBenefit/f:item/f:serviced/f:servicedDate | f:ExplanationOfBenefit/f:item/f:serviced/f:servicedPeriod");
dateParameter.setXpathUsage(SearchParameter.XPathUsageType.NORMAL);
mySearchParameterDao.update(dateParameter);

mySearchParamRegistry.forceRefresh();

IBundleProvider results;
List<String> foundResources;
Bundle result;

//Try with builtin SP
try {
myClient
.search()
.forResource(ExplanationOfBenefit.class)
.where(new StringClientParam("created").matches().value("01-01-2020"))
.returnBundle(Bundle.class)
.execute();

} catch (Exception e) {
assertThat(e.getMessage(), is(equalTo("HTTP 400 Bad Request: Invalid date/time format: \"01-01-2020\"")));
}

//Now with custom SP
try {
myClient
.search()
.forResource(ExplanationOfBenefit.class)
.where(new StringClientParam("service-date").matches().value("01-01-2020"))
.returnBundle(Bundle.class)
.execute();

} catch (Exception e) {
assertThat(e.getMessage(), is(equalTo("HTTP 400 Bad Request: Invalid date/time format: \"01-01-2020\"")));
}
}

/**
* See #1300
*/

@@ -94,7 +94,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -152,7 +152,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));

@@ -197,7 +197,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertFalse(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("No matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("No Matches found", ((StringType) param.getValue()).getValueAsString());

assertFalse(hasParameterByName(respParams, "match"));
}

@@ -230,7 +230,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(3, getNumberOfParametersByName(respParams, "match"));

@@ -308,7 +308,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));

@@ -361,7 +361,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(4, getNumberOfParametersByName(respParams, "match"));

@@ -452,7 +452,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(3, getNumberOfParametersByName(respParams, "match"));

@@ -531,7 +531,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));

@@ -582,7 +582,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));

@@ -647,7 +647,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));

@@ -698,7 +698,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));

@@ -761,7 +761,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(3, getNumberOfParametersByName(respParams, "match"));

@@ -838,7 +838,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(3, getNumberOfParametersByName(respParams, "match"));

@@ -949,7 +949,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -993,7 +993,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(3, getNumberOfParametersByName(respParams, "match"));

@@ -1074,7 +1074,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));

@@ -1120,7 +1120,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertFalse(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("No matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("No Matches found", ((StringType) param.getValue()).getValueAsString());

assertFalse(hasParameterByName(respParams, "match"));
}

@@ -1155,7 +1155,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));

@@ -1221,7 +1221,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));

@@ -1291,7 +1291,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(4, getNumberOfParametersByName(respParams, "match"));

@@ -1324,9 +1324,9 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertEquals(CM_URL, ((UriType) part.getValue()).getValueAsString());

param = getParametersByName(respParams, "match").get(2);
assertEquals(2, param.getPart().size());
assertEquals(3, param.getPart().size());
part = getPartByName(param, "equivalence");
assertFalse(part.hasValue());
assertEquals("wider", ((CodeType)part.getValue()).getCode());
part = getPartByName(param, "concept");
coding = (Coding) part.getValue();
assertEquals("23456", coding.getCode());

@@ -1384,7 +1384,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));

@@ -1451,7 +1451,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));

@@ -1518,7 +1518,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));

@@ -1571,7 +1571,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));

@@ -1622,7 +1622,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));

@@ -1687,7 +1687,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));

@@ -1783,7 +1783,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -1828,7 +1828,7 @@ public class ResourceProviderR4ConceptMapTest extends BaseResourceProviderR4Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(2, getNumberOfParametersByName(respParams, "match"));

@@ -541,7 +541,6 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv
ValueSet expanded = (ValueSet) respParam.getParameter().get(0).getResource();

String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded);
ourLog.info("zoop");
ourLog.info(resp);

assertThat(resp, is(containsStringIgnoringCase("<code value=\"M\"/>")));

@@ -88,7 +88,7 @@ public class ResourceProviderR5ConceptMapTest extends BaseResourceProviderR5Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

@@ -170,7 +170,7 @@ public class ResourceProviderR5ConceptMapTest extends BaseResourceProviderR5Test
assertTrue(((BooleanType) param.getValue()).booleanValue());

param = getParameterByName(respParams, "message");
assertEquals("Matches found!", ((StringType) param.getValue()).getValueAsString());
assertEquals("Matches found", ((StringType) param.getValue()).getValueAsString());

assertEquals(1, getNumberOfParametersByName(respParams, "match"));
param = getParametersByName(respParams, "match").get(0);

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -22,6 +22,7 @@ public class BatchJobConfig {
private StepBuilderFactory myStepBuilderFactory;


@Bean
public Job testJob() {
return myJobBuilderFactory.get("testJob")

@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -144,13 +144,13 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-test-utilities</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-test-utilities</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<scope>test</scope>
</dependency>
</dependencies>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -55,13 +55,13 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-test-utilities</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-test-utilities</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -71,53 +71,54 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
init501(); // 20200514 - 20200515
init510(); // 20200516 - 20201028
init520(); // 20201029 -
init530();
init530();
init540(); // 20210218 -
}

private void init540() {

Builder version = forVersion(VersionEnum.V5_4_0);

//-- add index on HFJ_SPIDX_DATE
version.onTable("HFJ_SPIDX_DATE").addIndex("20210309.1", "IDX_SP_DATE_HASH_HIGH")
.unique(false).withColumns("HASH_IDENTITY", "SP_VALUE_HIGH");
version.onTable("HFJ_SPIDX_DATE").addIndex("20210309.1", "IDX_SP_DATE_HASH_HIGH")
.unique(false).withColumns("HASH_IDENTITY", "SP_VALUE_HIGH");

//-- add index on HFJ_FORCED_ID
version.onTable("HFJ_FORCED_ID").addIndex("20210309.2", "IDX_FORCEID_FID")
.unique(false).withColumns("FORCED_ID");
version.onTable("HFJ_FORCED_ID").addIndex("20210309.2", "IDX_FORCEID_FID")
.unique(false).withColumns("FORCED_ID");

}

private void init530() {
Builder version = forVersion(VersionEnum.V5_3_0);

//-- TRM
version
.onTable("TRM_VALUESET_CONCEPT")
.dropIndex("20210104.1", "IDX_VS_CONCEPT_CS_CODE");

version
.onTable("TRM_VALUESET_CONCEPT")
.addIndex("20210104.2", "IDX_VS_CONCEPT_CSCD").unique(true).withColumns("VALUESET_PID", "SYSTEM_URL", "CODEVAL");

version
.onTable("TRM_VALUESET_CONCEPT")
.addIndex("20210104.2", "IDX_VS_CONCEPT_CSCD").unique(true).withColumns("VALUESET_PID", "SYSTEM_URL", "CODEVAL");

//-- Add new Table, HFJ_SPIDX_QUANTITY_NRML
version.addIdGenerator("20210109.1", "SEQ_SPIDX_QUANTITY_NRML");
Builder.BuilderAddTableByColumns pkg = version.addTableByColumns("20210109.2", "HFJ_SPIDX_QUANTITY_NRML", "SP_ID");
pkg.addColumn("RES_ID").nonNullable().type(ColumnTypeEnum.LONG);
pkg.addColumn("RES_TYPE").nonNullable().type(ColumnTypeEnum.STRING, 100);
pkg.addColumn("SP_UPDATED").nullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
pkg.addColumn("SP_MISSING").nonNullable().type(ColumnTypeEnum.BOOLEAN);
pkg.addColumn("RES_ID").nonNullable().type(ColumnTypeEnum.LONG);
pkg.addColumn("RES_TYPE").nonNullable().type(ColumnTypeEnum.STRING, 100);
pkg.addColumn("SP_UPDATED").nullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
pkg.addColumn("SP_MISSING").nonNullable().type(ColumnTypeEnum.BOOLEAN);
pkg.addColumn("SP_NAME").nonNullable().type(ColumnTypeEnum.STRING, 100);
pkg.addColumn("SP_ID").nonNullable().type(ColumnTypeEnum.LONG);
pkg.addColumn("SP_ID").nonNullable().type(ColumnTypeEnum.LONG);
pkg.addColumn("SP_SYSTEM").nullable().type(ColumnTypeEnum.STRING, 200);
pkg.addColumn("SP_UNITS").nullable().type(ColumnTypeEnum.STRING, 200);
pkg.addColumn("HASH_IDENTITY_AND_UNITS").nullable().type(ColumnTypeEnum.LONG);
pkg.addColumn("HASH_IDENTITY_SYS_UNITS").nullable().type(ColumnTypeEnum.LONG);
pkg.addColumn("HASH_IDENTITY").nullable().type(ColumnTypeEnum.LONG);
pkg.addColumn("SP_VALUE").nullable().type(ColumnTypeEnum.FLOAT);
pkg.addIndex("20210109.3", "IDX_SP_QNTY_NRML_HASH").unique(false).withColumns("HASH_IDENTITY","SP_VALUE");
pkg.addIndex("20210109.4", "IDX_SP_QNTY_NRML_HASH_UN").unique(false).withColumns("HASH_IDENTITY_AND_UNITS","SP_VALUE");
pkg.addIndex("20210109.5", "IDX_SP_QNTY_NRML_HASH_SYSUN").unique(false).withColumns("HASH_IDENTITY_SYS_UNITS","SP_VALUE");
pkg.addIndex("20210109.3", "IDX_SP_QNTY_NRML_HASH").unique(false).withColumns("HASH_IDENTITY", "SP_VALUE");
pkg.addIndex("20210109.4", "IDX_SP_QNTY_NRML_HASH_UN").unique(false).withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE");
pkg.addIndex("20210109.5", "IDX_SP_QNTY_NRML_HASH_SYSUN").unique(false).withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE");
pkg.addIndex("20210109.6", "IDX_SP_QNTY_NRML_UPDATED").unique(false).withColumns("SP_UPDATED");
pkg.addIndex("20210109.7", "IDX_SP_QNTY_NRML_RESID").unique(false).withColumns("RES_ID");

@@ -154,7 +155,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.toColumn("GOLDEN_RESOURCE_PID")
.references("HFJ_RESOURCE", "RES_ID");
}

protected void init510() {
Builder version = forVersion(VersionEnum.V5_1_0);

@@ -224,7 +225,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
init510_20200725();

//EMPI Target Type
empiLink.addColumn("20200727.1","TARGET_TYPE").nullable().type(ColumnTypeEnum.STRING, 40);
empiLink.addColumn("20200727.1", "TARGET_TYPE").nullable().type(ColumnTypeEnum.STRING, 40);

//ConceptMap add version for search
Builder.BuilderWithTableName trmConceptMap = version.onTable("TRM_CONCEPT_MAP");

@@ -1022,8 +1023,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
spidxUri
.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.44")
.setColumnName("HASH_IDENTITY")
.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(new PartitionSettings(), (RequestPartitionId)null, t.getResourceType(), t.getString("SP_NAME")))
.addCalculator("HASH_URI", t -> ResourceIndexedSearchParamUri.calculateHashUri(new PartitionSettings(), (RequestPartitionId)null, t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_URI")))
.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(new PartitionSettings(), (RequestPartitionId) null, t.getResourceType(), t.getString("SP_NAME")))
.addCalculator("HASH_URI", t -> ResourceIndexedSearchParamUri.calculateHashUri(new PartitionSettings(), (RequestPartitionId) null, t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_URI")))
);
}

@@ -1056,7 +1057,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Boolean present = columnToBoolean(t.get("SP_PRESENT"));
String resType = (String) t.get("RES_TYPE");
String paramName = (String) t.get("PARAM_NAME");
Long hash = SearchParamPresent.calculateHashPresence(new PartitionSettings(), (RequestPartitionId)null, resType, paramName, present);
Long hash = SearchParamPresent.calculateHashPresence(new PartitionSettings(), (RequestPartitionId) null, resType, paramName, present);
consolidateSearchParamPresenceIndexesTask.executeSql("HFJ_RES_PARAM_PRESENT", "update HFJ_RES_PARAM_PRESENT set HASH_PRESENCE = ? where PID = ?", hash, pid);
});
version.addTask(consolidateSearchParamPresenceIndexesTask);

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -169,7 +169,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-converter</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
</dependency>

</dependencies>

@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE1-SNAPSHOT</version>
<version>5.4.0-PRE2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -0,0 +1,29 @@
package ca.uhn.fhir.mdm.rules.matcher;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.ExtensionUtil;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseHasExtensions;

import java.util.List;

public class ExtensionMatcher implements IMdmFieldMatcher {

@Override
public boolean matches(FhirContext theFhirContext, IBase theLeftBase, IBase theRightBase, boolean theExact, String theIdentifierSystem) {
if (!(theLeftBase instanceof IBaseHasExtensions && theRightBase instanceof IBaseHasExtensions)) {
return false;
}
List<? extends IBaseExtension<?, ?>> leftExtension = ((IBaseHasExtensions) theLeftBase).getExtension();
List<? extends IBaseExtension<?, ?>> rightExtension = ((IBaseHasExtensions) theRightBase).getExtension();

boolean match = false;
for (IBaseExtension leftExtensionValue : leftExtension) {
for (IBaseExtension rightExtensionValue : rightExtension) {
match |= ExtensionUtil.equals(leftExtensionValue, rightExtensionValue);
}
}
return match;
}
}

@@ -50,7 +50,8 @@ public enum MdmMatcherEnum {

IDENTIFIER(new IdentifierMatcher()),

EMPTY_FIELD(new EmptyFieldMatcher());
EMPTY_FIELD(new EmptyFieldMatcher()),
EXTENSION_ANY_ORDER(new ExtensionMatcher());

private final IMdmFieldMatcher myMdmFieldMatcher;

@@ -151,6 +151,16 @@ public class MdmRuleValidatorTest extends BaseR4Test {
}
}

@Test
public void testMatcherExtensionJson() throws IOException {
try {
setMdmRuleJson("rules-extension-search.json");
}
catch (ConfigurationException e){
fail("Unable to validate extension matcher");
}
}

private void setMdmRuleJson(String theTheS) throws IOException {
MdmRuleValidator mdmRuleValidator = new MdmRuleValidator(ourFhirContext, mySearchParamRetriever);
MdmSettings mdmSettings = new MdmSettings(mdmRuleValidator);

@@ -0,0 +1,79 @@
package ca.uhn.fhir.mdm.rules.matcher;

import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class ExtensionMatcherR4Test extends BaseMatcherR4Test {
@Test
public void testPatientWithMatchingExtension(){
Patient patient1 = new Patient();
Patient patient2 = new Patient();

patient1.addExtension("asd",new StringType("Patient1"));
patient2.addExtension("asd",new StringType("Patient1"));

assertTrue(MdmMatcherEnum.EXTENSION_ANY_ORDER.match(ourFhirContext, patient1, patient2, false, null));
}

@Test
public void testPatientWithoutMatchingExtension(){
Patient patient1 = new Patient();
Patient patient2 = new Patient();

patient1.addExtension("asd",new StringType("Patient1"));
patient2.addExtension("asd",new StringType("Patient2"));

assertFalse(MdmMatcherEnum.EXTENSION_ANY_ORDER.match(ourFhirContext, patient1, patient2, false, null));
}

@Test
public void testPatientSameValueDifferentUrl(){
Patient patient1 = new Patient();
Patient patient2 = new Patient();

patient1.addExtension("asd",new StringType("Patient1"));
patient2.addExtension("asd1",new StringType("Patient1"));

assertFalse(MdmMatcherEnum.EXTENSION_ANY_ORDER.match(ourFhirContext, patient1, patient2, false, null));
}

@Test
public void testPatientWithMultipleExtensionOneMatching(){
Patient patient1 = new Patient();
Patient patient2 = new Patient();

patient1.addExtension("asd",new StringType("Patient1"));
patient1.addExtension("url1", new StringType("asd"));
patient2.addExtension("asd",new StringType("Patient1"));
patient2.addExtension("asdasd", new StringType("some value"));

assertTrue(MdmMatcherEnum.EXTENSION_ANY_ORDER.match(ourFhirContext, patient1, patient2, false, null));
}

@Test
public void testPatientWithoutIntExtension(){
Patient patient1 = new Patient();
Patient patient2 = new Patient();

patient1.addExtension("asd", new IntegerType(123));
patient2.addExtension("asd", new IntegerType(123));

assertTrue(MdmMatcherEnum.EXTENSION_ANY_ORDER.match(ourFhirContext, patient1, patient2, false, null));
}

@Test
public void testPatientWithNoExtension(){
Patient patient1 = new Patient();
Patient patient2 = new Patient();

assertFalse(MdmMatcherEnum.EXTENSION_ANY_ORDER.match(ourFhirContext, patient1, patient2, false, null));
}

}