Implement Terminology Delta Operations (#1536)
* Start refactoring terminology delta operations * Work on delta operations * Work on concept saving * Split term services into smaller services * Work on term delta operations * Work on term svcs * Work on term operations * More work on delta uploader * Add a test * Wrk on term service * Fix compile error * Some refactoring * Test fix * Test fix * Test fixes * Test fix * Test fixes * Test fixes * Work on delta * Work on tests# * Test fixes * Improve resequencing logic * Build test * More testing * More build testing * More work on tests * CHange test logging * Fix term service PID issue * Update src/changes/changes.xml Co-Authored-By: Diederik Muylwyk <diederik.muylwyk@gmail.com> * Address review comment * Some cleanup * Test fix * Fix some tests * Test fixes
This commit is contained in:
parent
92c6b88964
commit
2725797610
|
@ -1,15 +1,13 @@
|
|||
package example;
|
||||
|
||||
import ca.uhn.fhir.model.api.Tag;
|
||||
import ca.uhn.fhir.rest.annotation.Search;
|
||||
import org.hl7.fhir.r4.model.InstantType;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
|
||||
import ca.uhn.fhir.model.api.Tag;
|
||||
import ca.uhn.fhir.model.api.TagList;
|
||||
import ca.uhn.fhir.model.dstu2.resource.Patient;
|
||||
import ca.uhn.fhir.model.primitive.InstantDt;
|
||||
import ca.uhn.fhir.rest.annotation.Search;
|
||||
|
||||
public class ServerMetadataExamples {
|
||||
|
||||
// START SNIPPET: serverMethod
|
||||
|
@ -21,34 +19,21 @@ public class ServerMetadataExamples {
|
|||
Patient patient = new Patient();
|
||||
retVal.add(patient);
|
||||
patient.setId("Patient/123");
|
||||
patient.addName().addFamily("Smith").addGiven("John");
|
||||
|
||||
// Create a tag list and add it to the resource
|
||||
TagList tags = new TagList();
|
||||
tags.addTag(Tag.HL7_ORG_FHIR_TAG, "http://foo/tag1.html", "Some tag");
|
||||
tags.addTag(Tag.HL7_ORG_FHIR_TAG, "http://foo/tag2.html", "Another tag");
|
||||
ResourceMetadataKeyEnum.TAG_LIST.put(patient, tags);
|
||||
|
||||
// Set some links (these can be provided as relative links or absolute)
|
||||
// and the server will convert to absolute as appropriate
|
||||
String linkAlternate = "Patient/7736";
|
||||
ResourceMetadataKeyEnum.LINK_ALTERNATE.put(patient, linkAlternate);
|
||||
String linkSearch = "Patient?name=smith&name=john";
|
||||
ResourceMetadataKeyEnum.LINK_SEARCH.put(patient, linkSearch);
|
||||
|
||||
// Set the published and updated dates
|
||||
InstantDt pubDate = new InstantDt("2011-02-22");
|
||||
ResourceMetadataKeyEnum.PUBLISHED.put(patient, pubDate);
|
||||
InstantDt updatedDate = new InstantDt("2014-07-12T11:22:27Z");
|
||||
ResourceMetadataKeyEnum.UPDATED.put(patient, updatedDate);
|
||||
|
||||
// Set the resource title (note that if you are using HAPI's narrative
|
||||
// generation capability, the narrative generator will often create
|
||||
// useful titles automatically, and the server will create a default
|
||||
// title if none is provided)
|
||||
String title = "Patient John SMITH";
|
||||
ResourceMetadataKeyEnum.TITLE.put(patient, title);
|
||||
patient.addName().setFamily("Smith").addGiven("John");
|
||||
|
||||
// Add tags
|
||||
patient.getMeta().addTag()
|
||||
.setSystem(Tag.HL7_ORG_FHIR_TAG)
|
||||
.setCode("some_tag")
|
||||
.setDisplay("Some tag");
|
||||
patient.getMeta().addTag()
|
||||
.setSystem(Tag.HL7_ORG_FHIR_TAG)
|
||||
.setCode("another_tag")
|
||||
.setDisplay("Another tag");
|
||||
|
||||
// Set the last updated date
|
||||
patient.getMeta().setLastUpdatedElement(new InstantType("2011-02-22T11:22:00.0122Z"));
|
||||
|
||||
return retVal;
|
||||
}
|
||||
// END SNIPPET: serverMethod
|
||||
|
|
|
@ -120,6 +120,11 @@
|
|||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.awaitility</groupId>
|
||||
<artifactId>awaitility</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@ import ca.uhn.fhir.util.ReflectionUtil;
|
|||
import ca.uhn.fhir.util.VersionUtil;
|
||||
import ca.uhn.fhir.validation.FhirValidator;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.jena.riot.Lang;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBundle;
|
||||
|
@ -896,6 +897,11 @@ public class FhirContext {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "FhirContext[" + myVersion.getVersion().name() + "]";
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and returns a new FhirContext with version {@link FhirVersionEnum#DSTU2 DSTU2}
|
||||
*/
|
||||
|
|
|
@ -21,7 +21,6 @@ package ca.uhn.fhir.model.api;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
|
||||
import ca.uhn.fhir.model.primitive.DecimalDt;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
import ca.uhn.fhir.model.primitive.InstantDt;
|
||||
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
|
||||
|
@ -29,7 +28,6 @@ import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum;
|
|||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.hl7.fhir.instance.model.api.IAnyResource;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
@ -92,29 +90,7 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
|
|||
theResource.setUserData(DELETED_AT.name(), theObject);
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Denotes the search score which a given resource should match in a transaction. See the FHIR transaction definition for information about this. Corresponds to the value in
|
||||
* <code>Bundle.entry.score</code> in a Bundle resource.
|
||||
* <p>
|
||||
* Note that search URL is only used in FHIR DSTU2 and later.
|
||||
* </p>
|
||||
* <p>
|
||||
* Values for this key are of type <b>{@link DecimalDt}</b>
|
||||
* </p>
|
||||
*/
|
||||
public static final ResourceMetadataKeyEnum<DecimalDt> ENTRY_SCORE = new ResourceMetadataKeyEnum<DecimalDt>("ENTRY_SCORE") {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Override
|
||||
public DecimalDt get(IResource theResource) {
|
||||
return getDecimalFromMetadataOrNullIfNone(theResource.getResourceMetadata(), ENTRY_SCORE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(IResource theResource, DecimalDt theObject) {
|
||||
theResource.getResourceMetadata().put(ENTRY_SCORE, theObject);
|
||||
}
|
||||
};
|
||||
/**
|
||||
* If present and populated with a {@link BundleEntrySearchModeEnum}, contains the "bundle entry search mode", which is the value of the status field in the Bundle entry containing this resource.
|
||||
* The value for this key corresponds to field <code>Bundle.entry.search.mode</code>. This value can be set to provide a status value of "include" for included resources being returned by a
|
||||
|
@ -187,67 +163,7 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
|
|||
}
|
||||
|
||||
};
|
||||
/**
|
||||
* If present and populated with a string, provides the "alternate link" (the link element in the bundle entry with <code>rel="alternate"</code>). Server implementations may populate this with a
|
||||
* complete URL, in which case the URL will be placed as-is in the bundle. They may alternately specify a resource relative URL (e.g. "Patient/1243") in which case the server will convert this to
|
||||
* an absolute URL at runtime.
|
||||
* <p>
|
||||
* Values for this key are of type <b>{@link String}</b>
|
||||
* </p>
|
||||
*/
|
||||
public static final ResourceMetadataKeyEnum<String> LINK_ALTERNATE = new ResourceMetadataKeyEnum<String>("LINK_ALTERNATE") {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Override
|
||||
public String get(IResource theResource) {
|
||||
return getStringFromMetadataOrNullIfNone(theResource.getResourceMetadata(), LINK_ALTERNATE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(IResource theResource, String theObject) {
|
||||
theResource.getResourceMetadata().put(LINK_ALTERNATE, theObject);
|
||||
}
|
||||
};
|
||||
/**
|
||||
* If present and populated with a string, provides the "search link" (the link element in the bundle entry with <code>rel="search"</code>). Server implementations may populate this with a
|
||||
* complete URL, in which case the URL will be placed as-is in the bundle. They may alternately specify a resource relative URL (e.g. "Patient?name=tester") in which case the server will convert
|
||||
* this to an absolute URL at runtime.
|
||||
* <p>
|
||||
* Values for this key are of type <b>{@link String}</b>
|
||||
* </p>
|
||||
*/
|
||||
public static final ResourceMetadataKeyEnum<String> LINK_SEARCH = new ResourceMetadataKeyEnum<String>("LINK_SEARCH") {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Override
|
||||
public String get(IResource theResource) {
|
||||
return getStringFromMetadataOrNullIfNone(theResource.getResourceMetadata(), LINK_SEARCH);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(IResource theResource, String theObject) {
|
||||
theResource.getResourceMetadata().put(LINK_SEARCH, theObject);
|
||||
}
|
||||
};
|
||||
/**
|
||||
* The value for this key represents a previous ID used to identify this resource. This key is currently only used internally during transaction method processing.
|
||||
* <p>
|
||||
* Values for this key are of type <b>{@link IdDt}</b>
|
||||
* </p>
|
||||
*/
|
||||
public static final ResourceMetadataKeyEnum<IdDt> PREVIOUS_ID = new ResourceMetadataKeyEnum<IdDt>("PREVIOUS_ID") {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Override
|
||||
public IdDt get(IResource theResource) {
|
||||
return getIdFromMetadataOrNullIfNone(theResource.getResourceMetadata(), PREVIOUS_ID);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(IResource theResource, IdDt theObject) {
|
||||
theResource.getResourceMetadata().put(PREVIOUS_ID, theObject);
|
||||
}
|
||||
};
|
||||
/**
|
||||
* The value for this key represents a {@link List} of profile IDs that this resource claims to conform to.
|
||||
* <p>
|
||||
|
@ -301,18 +217,8 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
|
|||
if (obj == null) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
@SuppressWarnings("unchecked")
|
||||
List<BaseCodingDt> securityLabels = (List<BaseCodingDt>) obj;
|
||||
if (securityLabels.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
return securityLabels;
|
||||
} catch (ClassCastException e) {
|
||||
throw new InternalErrorException("Found an object of type '" + obj.getClass().getCanonicalName() + "' in resource metadata for key SECURITY_LABELS - Expected "
|
||||
+ BaseCodingDt.class.getCanonicalName());
|
||||
}
|
||||
|
||||
//noinspection unchecked
|
||||
return (List<BaseCodingDt>) obj;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -337,14 +243,9 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
|
|||
Object retValObj = theResource.getResourceMetadata().get(TAG_LIST);
|
||||
if (retValObj == null) {
|
||||
return null;
|
||||
} else if (retValObj instanceof TagList) {
|
||||
if (((TagList) retValObj).isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
} else {
|
||||
return (TagList) retValObj;
|
||||
}
|
||||
throw new InternalErrorException("Found an object of type '" + retValObj.getClass().getCanonicalName() + "' in resource metadata for key " + TAG_LIST.name() + " - Expected "
|
||||
+ TagList.class.getCanonicalName());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -352,25 +253,6 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
|
|||
theResource.getResourceMetadata().put(TAG_LIST, theObject);
|
||||
}
|
||||
};
|
||||
/**
|
||||
* If present and populated with a string (as an instance of {@link String}), this value contains the title for this resource, as supplied in any bundles containing the resource.
|
||||
* <p>
|
||||
* Values for this key are of type <b>{@link String}</b>
|
||||
* </p>
|
||||
*/
|
||||
public static final ResourceMetadataKeyEnum<String> TITLE = new ResourceMetadataKeyEnum<String>("TITLE") {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Override
|
||||
public String get(IResource theResource) {
|
||||
return getStringFromMetadataOrNullIfNone(theResource.getResourceMetadata(), TITLE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(IResource theResource, String theObject) {
|
||||
theResource.getResourceMetadata().put(TITLE, theObject);
|
||||
}
|
||||
};
|
||||
/**
|
||||
* The value for this key is the bundle entry <b>Updated</b> time. This is defined by FHIR as "Last Updated for resource". This value is also used for populating the "Last-Modified" header in the
|
||||
* case of methods that return a single resource (read, vread, etc.)
|
||||
|
@ -398,7 +280,10 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
|
|||
* <p>
|
||||
* Values for this key are of type <b>{@link String}</b>
|
||||
* </p>
|
||||
*
|
||||
* @deprecated The {@link IResource#getId()} resource ID will now be populated with the version ID via the {@link IdDt#getVersionIdPart()} method
|
||||
*/
|
||||
@Deprecated
|
||||
public static final ResourceMetadataKeyEnum<String> VERSION = new ResourceMetadataKeyEnum<String>("VERSION") {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
|
@ -426,7 +311,7 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
|
|||
|
||||
@Override
|
||||
public IdDt get(IResource theResource) {
|
||||
return getIdFromMetadataOrNullIfNone(theResource.getResourceMetadata(), VERSION_ID);
|
||||
return getIdFromMetadataOrNullIfNone(theResource.getResourceMetadata());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -474,32 +359,45 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
|
|||
|
||||
public abstract void put(IResource theResource, T theObject);
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return myValue;
|
||||
public static abstract class ResourceMetadataKeySupportingAnyResource<T, T2> extends ResourceMetadataKeyEnum<T> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public ResourceMetadataKeySupportingAnyResource(String theValue) {
|
||||
super(theValue);
|
||||
}
|
||||
|
||||
public abstract T2 get(IAnyResource theResource);
|
||||
|
||||
public abstract void put(IAnyResource theResource, T2 theObject);
|
||||
|
||||
}
|
||||
|
||||
private static DecimalDt getDecimalFromMetadataOrNullIfNone(Map<ResourceMetadataKeyEnum<?>, Object> theResourceMetadata, ResourceMetadataKeyEnum<DecimalDt> theKey) {
|
||||
Object retValObj = theResourceMetadata.get(theKey);
|
||||
if (retValObj == null) {
|
||||
return null;
|
||||
} else if (retValObj instanceof DecimalDt) {
|
||||
if (((DecimalDt) retValObj).isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
return (DecimalDt) retValObj;
|
||||
} else if (retValObj instanceof String) {
|
||||
if (StringUtils.isBlank((String) retValObj)) {
|
||||
return null;
|
||||
}
|
||||
return new DecimalDt((String) retValObj);
|
||||
} else if (retValObj instanceof Double) {
|
||||
return new DecimalDt((Double) retValObj);
|
||||
public static final class ExtensionResourceMetadataKey extends ResourceMetadataKeyEnum<ExtensionDt> {
|
||||
public ExtensionResourceMetadataKey(String theUrl) {
|
||||
super(theUrl);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExtensionDt get(IResource theResource) {
|
||||
Object retValObj = theResource.getResourceMetadata().get(this);
|
||||
if (retValObj == null) {
|
||||
return null;
|
||||
} else if (retValObj instanceof ExtensionDt) {
|
||||
return (ExtensionDt) retValObj;
|
||||
}
|
||||
throw new InternalErrorException("Found an object of type '" + retValObj.getClass().getCanonicalName()
|
||||
+ "' in resource metadata for key " + this.name() + " - Expected "
|
||||
+ ExtensionDt.class.getCanonicalName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(IResource theResource, ExtensionDt theObject) {
|
||||
theResource.getResourceMetadata().put(this, theObject);
|
||||
}
|
||||
throw new InternalErrorException("Found an object of type '" + retValObj.getClass().getCanonicalName() + "' in resource metadata for key " + theKey.name() + " - Expected "
|
||||
+ InstantDt.class.getCanonicalName());
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static <T extends Enum<?>> T getEnumFromMetadataOrNullIfNone(Map<ResourceMetadataKeyEnum<?>, Object> theResourceMetadata, ResourceMetadataKeyEnum<T> theKey, Class<T> theEnumType,
|
||||
IValueSetEnumBinder<T> theBinder) {
|
||||
|
@ -515,8 +413,8 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
|
|||
+ InstantDt.class.getCanonicalName());
|
||||
}
|
||||
|
||||
private static IdDt getIdFromMetadataOrNullIfNone(Map<ResourceMetadataKeyEnum<?>, Object> theResourceMetadata, ResourceMetadataKeyEnum<?> theKey) {
|
||||
return toId(theKey, theResourceMetadata.get(theKey));
|
||||
private static IdDt getIdFromMetadataOrNullIfNone(Map<ResourceMetadataKeyEnum<?>, Object> theResourceMetadata) {
|
||||
return toId(ResourceMetadataKeyEnum.VERSION_ID, theResourceMetadata.get(ResourceMetadataKeyEnum.VERSION_ID));
|
||||
}
|
||||
|
||||
private static List<IdDt> getIdListFromMetadataOrNullIfNone(Map<ResourceMetadataKeyEnum<?>, Object> theResourceMetadata, ResourceMetadataKeyEnum<?> theKey) {
|
||||
|
@ -586,49 +484,11 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
|
|||
}
|
||||
return (IdDt) retValObj;
|
||||
} else if (retValObj instanceof Number) {
|
||||
return new IdDt(((Number) retValObj).toString());
|
||||
return new IdDt(retValObj.toString());
|
||||
}
|
||||
throw new InternalErrorException("Found an object of type '" + retValObj.getClass().getCanonicalName() + "' in resource metadata for key " + theKey.name() + " - Expected "
|
||||
+ IdDt.class.getCanonicalName());
|
||||
}
|
||||
|
||||
public static abstract class ResourceMetadataKeySupportingAnyResource<T, T2> extends ResourceMetadataKeyEnum<T> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public ResourceMetadataKeySupportingAnyResource(String theValue) {
|
||||
super(theValue);
|
||||
}
|
||||
|
||||
public abstract T2 get(IAnyResource theResource);
|
||||
|
||||
public abstract void put(IAnyResource theResource, T2 theObject);
|
||||
|
||||
}
|
||||
|
||||
public static final class ExtensionResourceMetadataKey extends ResourceMetadataKeyEnum<ExtensionDt> {
|
||||
public ExtensionResourceMetadataKey(String url) {
|
||||
super(url);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExtensionDt get(IResource theResource) {
|
||||
Object retValObj = theResource.getResourceMetadata().get(this);
|
||||
if (retValObj == null) {
|
||||
return null;
|
||||
} else if (retValObj instanceof ExtensionDt) {
|
||||
return (ExtensionDt) retValObj;
|
||||
}
|
||||
throw new InternalErrorException("Found an object of type '" + retValObj.getClass().getCanonicalName()
|
||||
+ "' in resource metadata for key " + this.name() + " - Expected "
|
||||
+ ExtensionDt.class.getCanonicalName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void put(IResource theResource, ExtensionDt theObject) {
|
||||
theResource.getResourceMetadata().put(this, theObject);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -168,18 +168,6 @@ public class Tag extends BaseElement implements IElement, IBaseCoding {
|
|||
return this;
|
||||
}
|
||||
|
||||
public String toHeaderValue() {
|
||||
StringBuilder b = new StringBuilder();
|
||||
b.append(this.getTerm());
|
||||
if (isNotBlank(this.getLabel())) {
|
||||
b.append("; label=\"").append(this.getLabel()).append('"');
|
||||
}
|
||||
if (isNotBlank(this.getScheme())) {
|
||||
b.append("; scheme=\"").append(this.getScheme()).append('"');
|
||||
}
|
||||
return b.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
|
||||
|
|
|
@ -33,12 +33,6 @@ import java.util.Map;
|
|||
*/
|
||||
public interface IHttpResponse {
|
||||
|
||||
/**
|
||||
* @deprecated This method was deprecated in HAPI FHIR 2.2 because its name has a typo. Use {@link #bufferEntity()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
void bufferEntitity() throws IOException;
|
||||
|
||||
/**
|
||||
* Buffer the message entity data.
|
||||
* <p>
|
||||
|
|
|
@ -248,26 +248,30 @@ public class TokenParam extends BaseParam /*implements IQueryParameterType*/ {
|
|||
|
||||
@Override
|
||||
public boolean equals(Object theO) {
|
||||
if (this == theO) return true;
|
||||
if (this == theO) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (theO == null || getClass() != theO.getClass()) return false;
|
||||
if (theO == null || getClass() != theO.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
TokenParam that = (TokenParam) theO;
|
||||
|
||||
return new EqualsBuilder()
|
||||
.append(myModifier, that.myModifier)
|
||||
.append(mySystem, that.mySystem)
|
||||
.append(myValue, that.myValue)
|
||||
.isEquals();
|
||||
EqualsBuilder b = new EqualsBuilder();
|
||||
b.append(myModifier, that.myModifier);
|
||||
b.append(mySystem, that.mySystem);
|
||||
b.append(myValue, that.myValue);
|
||||
return b.isEquals();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return new HashCodeBuilder(17, 37)
|
||||
.append(myModifier)
|
||||
.append(mySystem)
|
||||
.append(myValue)
|
||||
.toHashCode();
|
||||
HashCodeBuilder b = new HashCodeBuilder(17, 37);
|
||||
b.append(myModifier);
|
||||
b.append(mySystem);
|
||||
b.append(myValue);
|
||||
return b.toHashCode();
|
||||
}
|
||||
|
||||
private static String toSystemValue(UriDt theSystem) {
|
||||
|
|
|
@ -0,0 +1,54 @@
|
|||
package ca.uhn.fhir.util;
|
||||
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
public class AsyncUtil {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(AsyncUtil.class);
|
||||
|
||||
/**
|
||||
* Non instantiable
|
||||
*/
|
||||
private AsyncUtil() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls Thread.sleep and if an InterruptedException occurs, logs a warning but otherwise continues
|
||||
*
|
||||
* @param theMillis The number of millis to sleep
|
||||
* @return Did we sleep the whole amount
|
||||
*/
|
||||
public static boolean sleep(long theMillis) {
|
||||
try {
|
||||
Thread.sleep(theMillis);
|
||||
return true;
|
||||
} catch (InterruptedException theE) {
|
||||
Thread.currentThread().interrupt();
|
||||
ourLog.warn("Sleep for {}ms was interrupted", theMillis);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static boolean awaitLatchAndThrowInternalErrorExceptionOnInterrupt(CountDownLatch theInitialCollectionLatch, long theTime, TimeUnit theTimeUnit) {
|
||||
try {
|
||||
return theInitialCollectionLatch.await(theTime, theTimeUnit);
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public static boolean awaitLatchAndIgnoreInterrupt(CountDownLatch theInitialCollectionLatch, long theTime, TimeUnit theTimeUnit) {
|
||||
try {
|
||||
return theInitialCollectionLatch.await(theTime, theTimeUnit);
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
ourLog.warn("Interrupted while waiting for latch");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -20,9 +20,8 @@ package ca.uhn.fhir.util;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
|
||||
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.*;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.ICompositeType;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
|
@ -35,12 +34,10 @@ public class AttachmentUtil {
|
|||
* Fetches the base64Binary value of Attachment.data, creating it if it does not
|
||||
* already exist.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public static IPrimitiveType<byte[]> getOrCreateData(FhirContext theContext, ICompositeType theAttachment) {
|
||||
return getOrCreateChild(theContext, theAttachment, "data", "base64Binary");
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static IPrimitiveType<String> getOrCreateContentType(FhirContext theContext, ICompositeType theAttachment) {
|
||||
return getOrCreateChild(theContext, theAttachment, "contentType", "string");
|
||||
}
|
||||
|
@ -64,6 +61,16 @@ public class AttachmentUtil {
|
|||
});
|
||||
}
|
||||
|
||||
public static void setUrl(FhirContext theContext, ICompositeType theAttachment, String theUrl) {
|
||||
BaseRuntimeChildDefinition entryChild = getChild(theContext, theAttachment, "url");
|
||||
assert entryChild != null : "Version " + theContext + " has no child " + "url";
|
||||
String typeName = "uri";
|
||||
if (theContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
|
||||
typeName = "url";
|
||||
}
|
||||
entryChild.getMutator().setValue(theAttachment, newPrimitive(theContext, typeName, theUrl));
|
||||
}
|
||||
|
||||
public static void setContentType(FhirContext theContext, ICompositeType theAttachment, String theContentType) {
|
||||
BaseRuntimeChildDefinition entryChild = getChild(theContext, theAttachment, "contentType");
|
||||
entryChild.getMutator().setValue(theAttachment, newPrimitive(theContext, "code", theContentType));
|
||||
|
@ -88,7 +95,9 @@ public class AttachmentUtil {
|
|||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
static <T> IPrimitiveType<T> newPrimitive(FhirContext theContext, String theType, T theValue) {
|
||||
IPrimitiveType<T> primitive = (IPrimitiveType<T>) theContext.getElementDefinition(theType).newInstance();
|
||||
BaseRuntimeElementDefinition<?> elementDefinition = theContext.getElementDefinition(theType);
|
||||
Validate.notNull(elementDefinition, "Unknown type %s for %s", theType, theContext);
|
||||
IPrimitiveType<T> primitive = (IPrimitiveType<T>) elementDefinition.newInstance();
|
||||
primitive.setValue(theValue);
|
||||
return primitive;
|
||||
}
|
||||
|
@ -100,4 +109,8 @@ public class AttachmentUtil {
|
|||
BaseRuntimeElementCompositeDefinition<?> def = (BaseRuntimeElementCompositeDefinition<?>) theContext.getElementDefinition(theElement.getClass());
|
||||
return def.getChildByName(theName);
|
||||
}
|
||||
|
||||
public static ICompositeType newInstance(FhirContext theFhirCtx) {
|
||||
return (ICompositeType) theFhirCtx.getElementDefinition("Attachment").newInstance();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -72,6 +72,8 @@ public class ParametersUtil {
|
|||
}
|
||||
|
||||
private static void addClientParameter(FhirContext theContext, Object theValue, IBaseResource theTargetResource, BaseRuntimeChildDefinition paramChild, BaseRuntimeElementCompositeDefinition<?> paramChildElem, String theName) {
|
||||
Validate.notNull(theValue, "theValue must not be null");
|
||||
|
||||
if (theValue instanceof IBaseResource) {
|
||||
IBase parameter = createParameterRepetition(theContext, theTargetResource, paramChild, paramChildElem, theName);
|
||||
paramChildElem.getChildByName("resource").getMutator().addValue(parameter, (IBaseResource) theValue);
|
||||
|
@ -162,7 +164,6 @@ public class ParametersUtil {
|
|||
IPrimitiveType<Boolean> value = (IPrimitiveType<Boolean>) theCtx.getElementDefinition("boolean").newInstance();
|
||||
value.setValue(theValue);
|
||||
addParameterToParameters(theCtx, theParameters, theName, value);
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package ca.uhn.fhir.util;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
|
||||
import java.text.DecimalFormat;
|
||||
|
@ -9,8 +10,6 @@ import java.util.Date;
|
|||
import java.util.LinkedList;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
/*
|
||||
* #%L
|
||||
* HAPI FHIR - Core Library
|
||||
|
@ -48,12 +47,14 @@ public class StopWatch {
|
|||
private long myStarted = now();
|
||||
private TaskTiming myCurrentTask;
|
||||
private LinkedList<TaskTiming> myTasks;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
public StopWatch() {
|
||||
super();
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
|
@ -63,7 +64,13 @@ public class StopWatch {
|
|||
myStarted = theStart.getTime();
|
||||
}
|
||||
|
||||
public StopWatch(long theL) {
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param theStart The time that the stopwatch was started
|
||||
*/
|
||||
public StopWatch(long theStart) {
|
||||
myStarted = theStart;
|
||||
}
|
||||
|
||||
private void addNewlineIfContentExists(StringBuilder theB) {
|
||||
|
@ -120,6 +127,8 @@ public class StopWatch {
|
|||
b.append(": ");
|
||||
b.append(formatMillis(delta));
|
||||
}
|
||||
} else {
|
||||
b.append("No tasks");
|
||||
}
|
||||
|
||||
TaskTiming last = null;
|
||||
|
@ -257,12 +266,11 @@ public class StopWatch {
|
|||
*/
|
||||
public void startTask(String theTaskName) {
|
||||
endCurrentTask();
|
||||
if (isNotBlank(theTaskName)) {
|
||||
myCurrentTask = new TaskTiming()
|
||||
.setTaskName(theTaskName)
|
||||
.setStart(now());
|
||||
myTasks.add(myCurrentTask);
|
||||
}
|
||||
Validate.notBlank(theTaskName, "Task name must not be blank");
|
||||
myCurrentTask = new TaskTiming()
|
||||
.setTaskName(theTaskName)
|
||||
.setStart(now());
|
||||
myTasks.add(myCurrentTask);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -331,18 +339,18 @@ public class StopWatch {
|
|||
/**
|
||||
* Append a right-aligned and zero-padded numeric value to a `StringBuilder`.
|
||||
*/
|
||||
static private void append(StringBuilder tgt, String pfx, int dgt, long val) {
|
||||
tgt.append(pfx);
|
||||
if (dgt > 1) {
|
||||
int pad = (dgt - 1);
|
||||
for (long xa = val; xa > 9 && pad > 0; xa /= 10) {
|
||||
static void appendRightAlignedNumber(StringBuilder theStringBuilder, String thePrefix, int theNumberOfDigits, long theValueToAppend) {
|
||||
theStringBuilder.append(thePrefix);
|
||||
if (theNumberOfDigits > 1) {
|
||||
int pad = (theNumberOfDigits - 1);
|
||||
for (long xa = theValueToAppend; xa > 9 && pad > 0; xa /= 10) {
|
||||
pad--;
|
||||
}
|
||||
for (int xa = 0; xa < pad; xa++) {
|
||||
tgt.append('0');
|
||||
theStringBuilder.append('0');
|
||||
}
|
||||
}
|
||||
tgt.append(val);
|
||||
theStringBuilder.append(theValueToAppend);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -399,11 +407,11 @@ public class StopWatch {
|
|||
}
|
||||
} else {
|
||||
long millisAsLong = (long) theMillis;
|
||||
append(buf, "", 2, ((millisAsLong % DateUtils.MILLIS_PER_DAY) / DateUtils.MILLIS_PER_HOUR));
|
||||
append(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_HOUR) / DateUtils.MILLIS_PER_MINUTE));
|
||||
append(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_MINUTE) / DateUtils.MILLIS_PER_SECOND));
|
||||
appendRightAlignedNumber(buf, "", 2, ((millisAsLong % DateUtils.MILLIS_PER_DAY) / DateUtils.MILLIS_PER_HOUR));
|
||||
appendRightAlignedNumber(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_HOUR) / DateUtils.MILLIS_PER_MINUTE));
|
||||
appendRightAlignedNumber(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_MINUTE) / DateUtils.MILLIS_PER_SECOND));
|
||||
if (theMillis <= DateUtils.MILLIS_PER_MINUTE) {
|
||||
append(buf, ".", 3, (millisAsLong % DateUtils.MILLIS_PER_SECOND));
|
||||
appendRightAlignedNumber(buf, ".", 3, (millisAsLong % DateUtils.MILLIS_PER_SECOND));
|
||||
}
|
||||
}
|
||||
return buf.toString();
|
||||
|
|
|
@ -19,26 +19,32 @@ package ca.uhn.fhir.validation;
|
|||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
import java.io.*;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.*;
|
||||
|
||||
import javax.xml.XMLConstants;
|
||||
import javax.xml.transform.Source;
|
||||
import javax.xml.transform.stream.StreamSource;
|
||||
import javax.xml.validation.*;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.io.input.BOMInputStream;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.w3c.dom.ls.LSInput;
|
||||
import org.w3c.dom.ls.LSResourceResolver;
|
||||
import org.xml.sax.*;
|
||||
|
||||
import ca.uhn.fhir.context.ConfigurationException;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.rest.api.EncodingEnum;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.io.input.BOMInputStream;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.w3c.dom.ls.LSInput;
|
||||
import org.w3c.dom.ls.LSResourceResolver;
|
||||
import org.xml.sax.SAXException;
|
||||
import org.xml.sax.SAXNotRecognizedException;
|
||||
import org.xml.sax.SAXParseException;
|
||||
|
||||
import javax.xml.XMLConstants;
|
||||
import javax.xml.transform.Source;
|
||||
import javax.xml.transform.stream.StreamSource;
|
||||
import javax.xml.validation.Schema;
|
||||
import javax.xml.validation.SchemaFactory;
|
||||
import javax.xml.validation.Validator;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.StringReader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.*;
|
||||
|
||||
public class SchemaBaseValidator implements IValidatorModule {
|
||||
public static final String RESOURCES_JAR_NOTE = "Note that as of HAPI FHIR 1.2, DSTU2 validation files are kept in a separate JAR (hapi-fhir-validation-resources-XXX.jar) which must be added to your classpath. See the HAPI FHIR download page for more information.";
|
||||
|
@ -47,7 +53,7 @@ public class SchemaBaseValidator implements IValidatorModule {
|
|||
private static final Set<String> SCHEMA_NAMES;
|
||||
|
||||
static {
|
||||
HashSet<String> sn = new HashSet<String>();
|
||||
HashSet<String> sn = new HashSet<>();
|
||||
sn.add("xml.xsd");
|
||||
sn.add("xhtml1-strict.xsd");
|
||||
sn.add("fhir-single.xsd");
|
||||
|
@ -59,15 +65,15 @@ public class SchemaBaseValidator implements IValidatorModule {
|
|||
SCHEMA_NAMES = Collections.unmodifiableSet(sn);
|
||||
}
|
||||
|
||||
private Map<String, Schema> myKeyToSchema = new HashMap<String, Schema>();
|
||||
private final Map<String, Schema> myKeyToSchema = new HashMap<>();
|
||||
private FhirContext myCtx;
|
||||
|
||||
public SchemaBaseValidator(FhirContext theContext) {
|
||||
myCtx = theContext;
|
||||
}
|
||||
|
||||
private void doValidate(IValidationContext<?> theContext, String schemaName) {
|
||||
Schema schema = loadSchema("dstu", schemaName);
|
||||
private void doValidate(IValidationContext<?> theContext) {
|
||||
Schema schema = loadSchema();
|
||||
|
||||
try {
|
||||
Validator validator = schema.newValidator();
|
||||
|
@ -81,14 +87,14 @@ public class SchemaBaseValidator implements IValidatorModule {
|
|||
}
|
||||
|
||||
try {
|
||||
/*
|
||||
* See https://github.com/jamesagnew/hapi-fhir/issues/339
|
||||
* https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
|
||||
*/
|
||||
/*
|
||||
* See https://github.com/jamesagnew/hapi-fhir/issues/339
|
||||
* https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
|
||||
*/
|
||||
validator.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "");
|
||||
validator.setProperty(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "");
|
||||
}catch (SAXNotRecognizedException ex){
|
||||
ourLog.warn("Jaxp 1.5 Support not found.",ex);
|
||||
} catch (SAXNotRecognizedException ex) {
|
||||
ourLog.warn("Jaxp 1.5 Support not found.", ex);
|
||||
}
|
||||
|
||||
validator.validate(new StreamSource(new StringReader(encodedResource)));
|
||||
|
@ -99,17 +105,14 @@ public class SchemaBaseValidator implements IValidatorModule {
|
|||
message.setMessage(e.getLocalizedMessage());
|
||||
message.setSeverity(ResultSeverityEnum.FATAL);
|
||||
theContext.addValidationMessage(message);
|
||||
} catch (SAXException e) {
|
||||
// Catch all
|
||||
throw new ConfigurationException("Could not load/parse schema file", e);
|
||||
} catch (IOException e) {
|
||||
} catch (SAXException | IOException e) {
|
||||
// Catch all
|
||||
throw new ConfigurationException("Could not load/parse schema file", e);
|
||||
}
|
||||
}
|
||||
|
||||
private Schema loadSchema(String theVersion, String theSchemaName) {
|
||||
String key = theVersion + "-" + theSchemaName;
|
||||
private Schema loadSchema() {
|
||||
String key = "fhir-single.xsd";
|
||||
|
||||
synchronized (myKeyToSchema) {
|
||||
Schema schema = myKeyToSchema.get(key);
|
||||
|
@ -117,81 +120,52 @@ public class SchemaBaseValidator implements IValidatorModule {
|
|||
return schema;
|
||||
}
|
||||
|
||||
Source baseSource = loadXml(null, theSchemaName);
|
||||
Source baseSource = loadXml("fhir-single.xsd");
|
||||
|
||||
SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
|
||||
schemaFactory.setResourceResolver(new MyResourceResolver());
|
||||
|
||||
try {
|
||||
try {
|
||||
/*
|
||||
* See https://github.com/jamesagnew/hapi-fhir/issues/339
|
||||
* https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
|
||||
*/
|
||||
/*
|
||||
* See https://github.com/jamesagnew/hapi-fhir/issues/339
|
||||
* https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
|
||||
*/
|
||||
schemaFactory.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "");
|
||||
}catch (SAXNotRecognizedException snex){
|
||||
ourLog.warn("Jaxp 1.5 Support not found.",snex);
|
||||
} catch (SAXNotRecognizedException e) {
|
||||
ourLog.warn("Jaxp 1.5 Support not found.", e);
|
||||
}
|
||||
schema = schemaFactory.newSchema(new Source[] { baseSource });
|
||||
schema = schemaFactory.newSchema(new Source[]{baseSource});
|
||||
} catch (SAXException e) {
|
||||
throw new ConfigurationException("Could not load/parse schema file: " + theSchemaName, e);
|
||||
throw new ConfigurationException("Could not load/parse schema file: " + "fhir-single.xsd", e);
|
||||
}
|
||||
myKeyToSchema.put(key, schema);
|
||||
return schema;
|
||||
}
|
||||
}
|
||||
|
||||
private Source loadXml(String theSystemId, String theSchemaName) {
|
||||
Source loadXml(String theSchemaName) {
|
||||
String pathToBase = myCtx.getVersion().getPathToSchemaDefinitions() + '/' + theSchemaName;
|
||||
ourLog.debug("Going to load resource: {}", pathToBase);
|
||||
InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase);
|
||||
if (baseIs == null) {
|
||||
throw new InternalErrorException("Schema not found. " + RESOURCES_JAR_NOTE);
|
||||
try (InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase)) {
|
||||
if (baseIs == null) {
|
||||
throw new InternalErrorException("Schema not found. " + RESOURCES_JAR_NOTE);
|
||||
}
|
||||
try (BOMInputStream bomInputStream = new BOMInputStream(baseIs, false)) {
|
||||
try (InputStreamReader baseReader = new InputStreamReader(bomInputStream, StandardCharsets.UTF_8)) {
|
||||
// Buffer so that we can close the input stream
|
||||
String contents = IOUtils.toString(baseReader);
|
||||
return new StreamSource(new StringReader(contents), null);
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
baseIs = new BOMInputStream(baseIs, false);
|
||||
InputStreamReader baseReader = new InputStreamReader(baseIs, Charset.forName("UTF-8"));
|
||||
Source baseSource = new StreamSource(baseReader, theSystemId);
|
||||
//FIXME resource leak
|
||||
return baseSource;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void validateResource(IValidationContext<IBaseResource> theContext) {
|
||||
doValidate(theContext, "fhir-single.xsd");
|
||||
}
|
||||
|
||||
private static class MyErrorHandler implements org.xml.sax.ErrorHandler {
|
||||
|
||||
private IValidationContext<?> myContext;
|
||||
|
||||
public MyErrorHandler(IValidationContext<?> theContext) {
|
||||
myContext = theContext;
|
||||
}
|
||||
|
||||
private void addIssue(SAXParseException theException, ResultSeverityEnum theSeverity) {
|
||||
SingleValidationMessage message = new SingleValidationMessage();
|
||||
message.setLocationLine(theException.getLineNumber());
|
||||
message.setLocationCol(theException.getColumnNumber());
|
||||
message.setMessage(theException.getLocalizedMessage());
|
||||
message.setSeverity(theSeverity);
|
||||
myContext.addValidationMessage(message);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void error(SAXParseException theException) {
|
||||
addIssue(theException, ResultSeverityEnum.ERROR);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fatalError(SAXParseException theException) {
|
||||
addIssue(theException, ResultSeverityEnum.FATAL);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void warning(SAXParseException theException) {
|
||||
addIssue(theException, ResultSeverityEnum.WARNING);
|
||||
}
|
||||
|
||||
doValidate(theContext);
|
||||
}
|
||||
|
||||
private final class MyResourceResolver implements LSResourceResolver {
|
||||
|
@ -225,4 +199,38 @@ public class SchemaBaseValidator implements IValidatorModule {
|
|||
}
|
||||
}
|
||||
|
||||
private static class MyErrorHandler implements org.xml.sax.ErrorHandler {
|
||||
|
||||
private IValidationContext<?> myContext;
|
||||
|
||||
MyErrorHandler(IValidationContext<?> theContext) {
|
||||
myContext = theContext;
|
||||
}
|
||||
|
||||
private void addIssue(SAXParseException theException, ResultSeverityEnum theSeverity) {
|
||||
SingleValidationMessage message = new SingleValidationMessage();
|
||||
message.setLocationLine(theException.getLineNumber());
|
||||
message.setLocationCol(theException.getColumnNumber());
|
||||
message.setMessage(theException.getLocalizedMessage());
|
||||
message.setSeverity(theSeverity);
|
||||
myContext.addValidationMessage(message);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void error(SAXParseException theException) {
|
||||
addIssue(theException, ResultSeverityEnum.ERROR);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fatalError(SAXParseException theException) {
|
||||
addIssue(theException, ResultSeverityEnum.FATAL);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void warning(SAXParseException theException) {
|
||||
addIssue(theException, ResultSeverityEnum.WARNING);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -125,9 +125,9 @@ ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownPath=Unable to find content
|
|||
ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownType=Content in resource of type {0} at path {1} is not appropriate for binary storage: {2}
|
||||
|
||||
|
||||
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemUrl=Can not create multiple CodeSystem resources with CodeSystem.url "{0}", already have one with resource ID: {1}
|
||||
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1}
|
||||
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateValueSetUrl=Can not create multiple ValueSet resources with ValueSet.url "{0}", already have one with resource ID: {1}
|
||||
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
|
||||
ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.cannotCreateDuplicateCodeSystemUrl=Can not create multiple CodeSystem resources with CodeSystem.url "{0}", already have one with resource ID: {1}
|
||||
ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1}
|
||||
ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.cannotCreateDuplicateValueSetUrl=Can not create multiple ValueSet resources with ValueSet.url "{0}", already have one with resource ID: {1}
|
||||
ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
|
||||
|
||||
ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON patch to {0}: {1}
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
package ca.uhn.fhir.model.api;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class ResourceMetadataKeyEnumTest {
|
||||
|
||||
@Test
|
||||
public void testHashCode() {
|
||||
assertEquals(-60968467, ResourceMetadataKeyEnum.PUBLISHED.hashCode());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEquals() {
|
||||
assertNotEquals(ResourceMetadataKeyEnum.PROFILES, null);
|
||||
assertNotEquals(ResourceMetadataKeyEnum.PROFILES, "");
|
||||
assertNotEquals(ResourceMetadataKeyEnum.PROFILES, ResourceMetadataKeyEnum.PUBLISHED);
|
||||
assertEquals(ResourceMetadataKeyEnum.PROFILES, ResourceMetadataKeyEnum.PROFILES);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testExtensionResourceEquals() {
|
||||
assertNotEquals(new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo"), new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://bar"));
|
||||
assertNotEquals(new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo"), null);
|
||||
assertNotEquals(new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo"), "");
|
||||
assertEquals(new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo"), new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo"));
|
||||
|
||||
ResourceMetadataKeyEnum.ExtensionResourceMetadataKey foo = new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo");
|
||||
assertEquals(foo, foo);
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,46 @@
|
|||
package ca.uhn.fhir.model.api;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class TagTest {
|
||||
|
||||
@Test
|
||||
public void testEquals() {
|
||||
Tag tag1 = new Tag().setScheme("scheme").setTerm("term").setLabel("label");
|
||||
Tag tag2 = new Tag().setScheme("scheme").setTerm("term").setLabel("label");
|
||||
Tag tag3 = new Tag().setScheme("scheme2").setTerm("term").setLabel("label");
|
||||
Tag tag4 = new Tag().setScheme("scheme").setTerm("term2").setLabel("label");
|
||||
|
||||
assertEquals(tag1, tag1);
|
||||
assertEquals(tag1, tag2);
|
||||
assertNotEquals(tag1, tag3);
|
||||
assertNotEquals(tag1, tag4);
|
||||
assertNotEquals(tag1, null);
|
||||
assertNotEquals(tag1, "");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHashCode() {
|
||||
Tag tag1 = new Tag().setScheme("scheme").setTerm("term").setLabel("label");
|
||||
assertEquals(1920714536, tag1.hashCode());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testConstructors() throws URISyntaxException {
|
||||
assertTrue(new Tag().isEmpty());
|
||||
assertFalse(new Tag("http://foo").isEmpty());
|
||||
assertFalse(new Tag("http://foo", "http://bar").isEmpty());
|
||||
assertFalse(new Tag(new URI("http://foo"), new URI("http://bar"), "Label").isEmpty());
|
||||
assertTrue(new Tag((URI)null, null, "Label").isEmpty());
|
||||
|
||||
assertEquals("http://foo", new Tag(new URI("http://foo"), new URI("http://bar"), "Label").getSystem());
|
||||
assertEquals("http://bar", new Tag(new URI("http://foo"), new URI("http://bar"), "Label").getCode());
|
||||
assertEquals("Label", new Tag(new URI("http://foo"), new URI("http://bar"), "Label").getDisplay());
|
||||
}
|
||||
|
||||
}
|
|
@ -2,13 +2,34 @@ package ca.uhn.fhir.rest.param;
|
|||
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class TokenParamTest {
|
||||
@Test
|
||||
public void testEquals() {
|
||||
TokenParam tokenParam1 = new TokenParam("foo", "bar");
|
||||
TokenParam tokenParam2 = new TokenParam("foo", "bar");
|
||||
TokenParam tokenParam3 = new TokenParam("foo", "baz");
|
||||
assertEquals(tokenParam1, tokenParam1);
|
||||
assertEquals(tokenParam1, tokenParam2);
|
||||
assertNotEquals(tokenParam1, tokenParam3);
|
||||
assertNotEquals(tokenParam1, null);
|
||||
assertNotEquals(tokenParam1, "");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHashCode() {
|
||||
TokenParam tokenParam1 = new TokenParam("foo", "bar");
|
||||
assertEquals(4716638, tokenParam1.hashCode());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testIsEmpty() {
|
||||
assertFalse(new TokenParam("foo", "bar").isEmpty());
|
||||
assertTrue(new TokenParam("", "").isEmpty());
|
||||
assertTrue(new TokenParam().isEmpty());
|
||||
assertEquals("", new TokenParam().getValueNotNull());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,62 @@
|
|||
package ca.uhn.fhir.util;
|
||||
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
import static org.awaitility.Awaitility.await;
|
||||
|
||||
public class AsyncUtilTest {
|
||||
|
||||
@Test
|
||||
public void testSleep() {
|
||||
AsyncUtil.sleep(10);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSleepWithInterrupt() {
|
||||
AtomicBoolean outcomeHolder = new AtomicBoolean(true);
|
||||
Thread thread = new Thread(() -> {
|
||||
boolean outcome = AsyncUtil.sleep(10000);
|
||||
outcomeHolder.set(outcome);
|
||||
});
|
||||
thread.start();
|
||||
thread.interrupt();
|
||||
await().until(()-> outcomeHolder.get() == false);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAwaitLatchAndThrowInternalErrorException() {
|
||||
AtomicBoolean outcomeHolder = new AtomicBoolean(false);
|
||||
|
||||
CountDownLatch latch = new CountDownLatch(1);
|
||||
Thread thread = new Thread(() -> {
|
||||
try {
|
||||
AsyncUtil.awaitLatchAndThrowInternalErrorExceptionOnInterrupt(latch, 10, TimeUnit.SECONDS);
|
||||
} catch (InternalErrorException e) {
|
||||
outcomeHolder.set(true);
|
||||
}
|
||||
});
|
||||
thread.start();
|
||||
thread.interrupt();
|
||||
await().until(()-> outcomeHolder.get());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAwaitLatchIgnoreInterruption() {
|
||||
AtomicBoolean outcomeHolder = new AtomicBoolean(true);
|
||||
|
||||
CountDownLatch latch = new CountDownLatch(1);
|
||||
Thread thread = new Thread(() -> {
|
||||
boolean outcome = AsyncUtil.awaitLatchAndIgnoreInterrupt(latch, 10, TimeUnit.SECONDS);
|
||||
outcomeHolder.set(outcome);
|
||||
});
|
||||
thread.start();
|
||||
thread.interrupt();
|
||||
await().until(()-> outcomeHolder.get() == false);
|
||||
}
|
||||
|
||||
}
|
|
@ -128,6 +128,56 @@ public class StopWatchTest {
|
|||
assertEquals("TASK1: 500ms\nTASK2: 100ms", taskDurations);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFormatTaskDurationsDelayBetweenTasks() {
|
||||
StopWatch sw = new StopWatch();
|
||||
|
||||
StopWatch.setNowForUnitTestForUnitTest(1000L);
|
||||
sw.startTask("TASK1");
|
||||
|
||||
StopWatch.setNowForUnitTestForUnitTest(1500L);
|
||||
sw.endCurrentTask();
|
||||
|
||||
StopWatch.setNowForUnitTestForUnitTest(2000L);
|
||||
sw.startTask("TASK2");
|
||||
|
||||
StopWatch.setNowForUnitTestForUnitTest(2100L);
|
||||
sw.endCurrentTask();
|
||||
|
||||
StopWatch.setNowForUnitTestForUnitTest(2200L);
|
||||
String taskDurations = sw.formatTaskDurations();
|
||||
ourLog.info(taskDurations);
|
||||
assertEquals("TASK1: 500ms\n" +
|
||||
"Between: 500ms\n" +
|
||||
"TASK2: 100ms\n" +
|
||||
"After last task: 100ms", taskDurations);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFormatTaskDurationsLongDelayBeforeStart() {
|
||||
StopWatch sw = new StopWatch(0);
|
||||
|
||||
StopWatch.setNowForUnitTestForUnitTest(1000L);
|
||||
sw.startTask("TASK1");
|
||||
|
||||
StopWatch.setNowForUnitTestForUnitTest(1500L);
|
||||
sw.startTask("TASK2");
|
||||
|
||||
StopWatch.setNowForUnitTestForUnitTest(1600L);
|
||||
String taskDurations = sw.formatTaskDurations();
|
||||
ourLog.info(taskDurations);
|
||||
assertEquals("Before first task: 1000ms\nTASK1: 500ms\nTASK2: 100ms", taskDurations);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFormatTaskDurationsNoTasks() {
|
||||
StopWatch sw = new StopWatch(0);
|
||||
|
||||
String taskDurations = sw.formatTaskDurations();
|
||||
ourLog.info(taskDurations);
|
||||
assertEquals("No tasks", taskDurations);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFormatThroughput60Ops4Min() {
|
||||
StopWatch sw = new StopWatch(DateUtils.addMinutes(new Date(), -4));
|
||||
|
@ -210,4 +260,34 @@ public class StopWatchTest {
|
|||
assertThat(string, matchesPattern("^[0-9]{3,4}ms$"));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testAppendRightAlignedNumber() {
|
||||
StringBuilder b= new StringBuilder();
|
||||
|
||||
b.setLength(0);
|
||||
StopWatch.appendRightAlignedNumber(b, "PFX", 0, 100);
|
||||
assertEquals("PFX100", b.toString());
|
||||
|
||||
b.setLength(0);
|
||||
StopWatch.appendRightAlignedNumber(b, "PFX", 1, 100);
|
||||
assertEquals("PFX100", b.toString());
|
||||
|
||||
b.setLength(0);
|
||||
StopWatch.appendRightAlignedNumber(b, "PFX", 2, 100);
|
||||
assertEquals("PFX100", b.toString());
|
||||
|
||||
b.setLength(0);
|
||||
StopWatch.appendRightAlignedNumber(b, "PFX", 3, 100);
|
||||
assertEquals("PFX100", b.toString());
|
||||
|
||||
b.setLength(0);
|
||||
StopWatch.appendRightAlignedNumber(b, "PFX", 4, 100);
|
||||
assertEquals("PFX0100", b.toString());
|
||||
|
||||
b.setLength(0);
|
||||
StopWatch.appendRightAlignedNumber(b, "PFX", 10, 100);
|
||||
assertEquals("PFX0000000100", b.toString());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@ package ca.uhn.fhir.cli;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
|
@ -44,7 +44,7 @@ public class ToggleSearchParametersCommand extends BaseCommand {
|
|||
Options options = new Options();
|
||||
addFhirVersionOption(options);
|
||||
addBaseUrlOption(options);
|
||||
addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + IHapiTerminologyLoaderSvc.SCT_URI + ")");
|
||||
addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + ITermLoaderSvc.SCT_URI + ")");
|
||||
addBasicAuthOption(options);
|
||||
return options;
|
||||
}
|
||||
|
|
|
@ -20,35 +20,32 @@ package ca.uhn.fhir.cli;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
|
||||
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.rest.client.api.IGenericClient;
|
||||
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
|
||||
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
|
||||
import ca.uhn.fhir.util.AttachmentUtil;
|
||||
import ca.uhn.fhir.util.ParametersUtil;
|
||||
import com.google.common.base.Charsets;
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.hl7.fhir.instance.model.api.IBaseParameters;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.instance.model.api.ICompositeType;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipOutputStream;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
|
||||
public class UploadTerminologyCommand extends BaseCommand {
|
||||
public static final String UPLOAD_TERMINOLOGY = "upload-terminology";
|
||||
static final String UPLOAD_TERMINOLOGY = "upload-terminology";
|
||||
// TODO: Don't use qualified names for loggers in HAPI CLI.
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(UploadTerminologyCommand.class);
|
||||
|
||||
|
@ -68,9 +65,8 @@ public class UploadTerminologyCommand extends BaseCommand {
|
|||
|
||||
addFhirVersionOption(options);
|
||||
addBaseUrlOption(options);
|
||||
addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + IHapiTerminologyLoaderSvc.SCT_URI + ")");
|
||||
addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + ITermLoaderSvc.SCT_URI + ")");
|
||||
addOptionalOption(options, "d", "data", true, "Local file to use to upload (can be a raw file or a ZIP containing the raw file)");
|
||||
addOptionalOption(options, null, "custom", false, "Indicates that this upload uses the HAPI FHIR custom external terminology format");
|
||||
addOptionalOption(options, "m", "mode", true, "The upload mode: SNAPSHOT (default), ADD, REMOVE");
|
||||
addBasicAuthOption(options);
|
||||
addVerboseLoggingOption(options);
|
||||
|
@ -109,104 +105,86 @@ public class UploadTerminologyCommand extends BaseCommand {
|
|||
|
||||
switch (mode) {
|
||||
case SNAPSHOT:
|
||||
uploadSnapshot(inputParameters, termUrl, datafile, theCommandLine, client);
|
||||
invokeOperation(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM);
|
||||
break;
|
||||
case ADD:
|
||||
uploadDelta(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, false);
|
||||
invokeOperation(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD);
|
||||
break;
|
||||
case REMOVE:
|
||||
uploadDelta(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, true);
|
||||
invokeOperation(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE);
|
||||
break;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void uploadDelta(CommandLine theCommandLine, String theTermUrl, String[] theDatafile, IGenericClient theClient, IBaseParameters theInputParameters, String theOperationName, boolean theFlatten) {
|
||||
ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputParameters, "url", theTermUrl);
|
||||
private void invokeOperation(CommandLine theCommandLine, String theTermUrl, String[] theDatafile, IGenericClient theClient, IBaseParameters theInputParameters, String theOperationName) throws ParseException {
|
||||
ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_SYSTEM, theTermUrl);
|
||||
|
||||
List<IHapiTerminologyLoaderSvc.FileDescriptor> fileDescriptors = new ArrayList<>();
|
||||
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
|
||||
ZipOutputStream zipOutputStream = new ZipOutputStream(byteArrayOutputStream, Charsets.UTF_8);
|
||||
boolean haveCompressedContents = false;
|
||||
try {
|
||||
for (String nextDataFile : theDatafile) {
|
||||
|
||||
try (FileInputStream fileInputStream = new FileInputStream(nextDataFile)) {
|
||||
if (!nextDataFile.endsWith(".zip")) {
|
||||
|
||||
ourLog.info("Compressing and adding file: {}", nextDataFile);
|
||||
ZipEntry nextEntry = new ZipEntry(stripPath(nextDataFile));
|
||||
zipOutputStream.putNextEntry(nextEntry);
|
||||
|
||||
IOUtils.copy(fileInputStream, zipOutputStream);
|
||||
haveCompressedContents = true;
|
||||
|
||||
zipOutputStream.flush();
|
||||
ourLog.info("Finished compressing {} into {}", nextEntry.getSize(), nextEntry.getCompressedSize());
|
||||
|
||||
} else {
|
||||
|
||||
ourLog.info("Adding file: {}", nextDataFile);
|
||||
ICompositeType attachment = AttachmentUtil.newInstance(myFhirCtx);
|
||||
AttachmentUtil.setUrl(myFhirCtx, attachment, "file:" + nextDataFile);
|
||||
AttachmentUtil.setData(myFhirCtx, attachment, IOUtils.toByteArray(fileInputStream));
|
||||
ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_FILE, attachment);
|
||||
|
||||
for (String next : theDatafile) {
|
||||
try (FileInputStream inputStream = new FileInputStream(next)) {
|
||||
byte[] bytes = IOUtils.toByteArray(inputStream);
|
||||
fileDescriptors.add(new IHapiTerminologyLoaderSvc.FileDescriptor() {
|
||||
@Override
|
||||
public String getFilename() {
|
||||
return next;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream getInputStream() {
|
||||
return new ByteArrayInputStream(bytes);
|
||||
}
|
||||
});
|
||||
} catch (IOException e) {
|
||||
throw new CommandFailureException("Failed to read from file \"" + next + "\": " + e.getMessage());
|
||||
}
|
||||
zipOutputStream.flush();
|
||||
zipOutputStream.close();
|
||||
} catch (IOException e) {
|
||||
throw new ParseException(e.toString());
|
||||
}
|
||||
|
||||
TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
|
||||
TerminologyLoaderSvcImpl.LoadedFileDescriptors descriptors = new TerminologyLoaderSvcImpl.LoadedFileDescriptors(fileDescriptors);
|
||||
TerminologyLoaderSvcImpl.processCustomTerminologyFiles(descriptors, codeSystemVersion);
|
||||
|
||||
CodeSystem codeSystem = new CodeSystem();
|
||||
codeSystem.setUrl(theTermUrl);
|
||||
addCodesToCodeSystem(codeSystemVersion.getConcepts(), codeSystem.getConcept(), theFlatten);
|
||||
|
||||
ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, "value", codeSystem);
|
||||
|
||||
if (theCommandLine.hasOption("custom")) {
|
||||
ParametersUtil.addParameterToParametersCode(myFhirCtx, theInputParameters, "contentMode", "custom");
|
||||
if (haveCompressedContents) {
|
||||
ICompositeType attachment = AttachmentUtil.newInstance(myFhirCtx);
|
||||
AttachmentUtil.setUrl(myFhirCtx, attachment, "file:/files.zip");
|
||||
AttachmentUtil.setData(myFhirCtx, attachment, byteArrayOutputStream.toByteArray());
|
||||
ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_FILE, attachment);
|
||||
}
|
||||
|
||||
ourLog.info("Beginning upload - This may take a while...");
|
||||
|
||||
IBaseParameters response = theClient
|
||||
.operation()
|
||||
.onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass())
|
||||
.named(theOperationName)
|
||||
.withParameters(theInputParameters)
|
||||
.execute();
|
||||
if (ourLog.isDebugEnabled() || "true".equals(System.getProperty("test"))) {
|
||||
ourLog.info("Submitting parameters: {}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(theInputParameters));
|
||||
}
|
||||
|
||||
ourLog.info("Upload complete!");
|
||||
ourLog.info("Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response));
|
||||
}
|
||||
|
||||
private void addCodesToCodeSystem(Collection<TermConcept> theSourceConcepts, List<CodeSystem.ConceptDefinitionComponent> theTargetConcept, boolean theFlatten) {
|
||||
for (TermConcept nextSourceConcept : theSourceConcepts) {
|
||||
|
||||
CodeSystem.ConceptDefinitionComponent nextTarget = new CodeSystem.ConceptDefinitionComponent();
|
||||
nextTarget.setCode(nextSourceConcept.getCode());
|
||||
nextTarget.setDisplay(nextSourceConcept.getDisplay());
|
||||
theTargetConcept.add(nextTarget);
|
||||
|
||||
List<TermConcept> children = nextSourceConcept.getChildren().stream().map(t -> t.getChild()).collect(Collectors.toList());
|
||||
if (theFlatten) {
|
||||
addCodesToCodeSystem(children, theTargetConcept, theFlatten);
|
||||
} else {
|
||||
addCodesToCodeSystem(children, nextTarget.getConcept(), theFlatten);
|
||||
IBaseParameters response;
|
||||
try {
|
||||
response = theClient
|
||||
.operation()
|
||||
.onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass())
|
||||
.named(theOperationName)
|
||||
.withParameters(theInputParameters)
|
||||
.execute();
|
||||
} catch (BaseServerResponseException e) {
|
||||
if (e.getOperationOutcome() != null) {
|
||||
ourLog.error("Received the following response:\n{}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()));
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
private void uploadSnapshot(IBaseParameters theInputparameters, String theTermUrl, String[] theDatafile, CommandLine theCommandLine, IGenericClient theClient) {
|
||||
ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputparameters, "url", theTermUrl);
|
||||
for (String next : theDatafile) {
|
||||
ParametersUtil.addParameterToParametersString(myFhirCtx, theInputparameters, "localfile", next);
|
||||
}
|
||||
if (theCommandLine.hasOption("custom")) {
|
||||
ParametersUtil.addParameterToParametersCode(myFhirCtx, theInputparameters, "contentMode", "custom");
|
||||
throw e;
|
||||
}
|
||||
|
||||
ourLog.info("Beginning upload - This may take a while...");
|
||||
|
||||
IBaseParameters response = theClient
|
||||
.operation()
|
||||
.onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass())
|
||||
.named(JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM)
|
||||
.withParameters(theInputparameters)
|
||||
.execute();
|
||||
|
||||
ourLog.info("Upload complete!");
|
||||
ourLog.info("Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response));
|
||||
|
@ -216,4 +194,12 @@ public class UploadTerminologyCommand extends BaseCommand {
|
|||
SNAPSHOT, ADD, REMOVE
|
||||
}
|
||||
|
||||
public static String stripPath(String thePath) {
|
||||
String retVal = thePath;
|
||||
if (retVal.contains("/")) {
|
||||
retVal = retVal.substring(retVal.lastIndexOf("/"));
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -34,7 +34,7 @@
|
|||
<!--
|
||||
It's useful to have this log when uploading big terminologies
|
||||
-->
|
||||
<logger name="ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl" additivity="false" level="info">
|
||||
<logger name="ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl" additivity="false" level="info">
|
||||
<appender-ref ref="STDOUT" />
|
||||
</logger>
|
||||
|
||||
|
|
|
@ -1,18 +1,20 @@
|
|||
package ca.uhn.fhir.cli;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.BaseTest;
|
||||
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.UploadStatistics;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
|
||||
import ca.uhn.fhir.rest.server.RestfulServer;
|
||||
import ca.uhn.fhir.test.utilities.JettyUtil;
|
||||
import com.google.common.base.Charsets;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.eclipse.jetty.server.Server;
|
||||
import org.eclipse.jetty.servlet.ServletHandler;
|
||||
import org.eclipse.jetty.servlet.ServletHolder;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
|
@ -25,15 +27,19 @@ import org.mockito.junit.MockitoJUnitRunner;
|
|||
|
||||
import java.io.*;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipOutputStream;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.hamcrest.Matchers.matchesPattern;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class UploadTerminologyCommandTest {
|
||||
public class UploadTerminologyCommandTest extends BaseTest {
|
||||
|
||||
static {
|
||||
System.setProperty("test", "true");
|
||||
|
@ -42,26 +48,24 @@ public class UploadTerminologyCommandTest {
|
|||
private Server myServer;
|
||||
private FhirContext myCtx = FhirContext.forR4();
|
||||
@Mock
|
||||
private IHapiTerminologyLoaderSvc myTerminologyLoaderSvc;
|
||||
@Mock
|
||||
private IHapiTerminologySvc myTerminologySvc;
|
||||
private ITermLoaderSvc myTermLoaderSvc;
|
||||
@Captor
|
||||
private ArgumentCaptor<List<IHapiTerminologyLoaderSvc.FileDescriptor>> myDescriptorList;
|
||||
@Captor
|
||||
private ArgumentCaptor<CodeSystem> myCodeSystemCaptor;
|
||||
private ArgumentCaptor<List<ITermLoaderSvc.FileDescriptor>> myDescriptorListCaptor;
|
||||
|
||||
private int myPort;
|
||||
private String myConceptsFileName = "target/concepts.csv";
|
||||
private String myHierarchyFileName = "target/hierarchy.csv";
|
||||
private File myConceptsFile = new File(myConceptsFileName);
|
||||
private File myHierarchyFile = new File(myHierarchyFileName);
|
||||
private File myArchiveFile;
|
||||
private String myArchiveFileName;
|
||||
|
||||
@Test
|
||||
public void testTerminologyUpload_AddDelta() throws IOException {
|
||||
public void testAddDelta() throws IOException {
|
||||
|
||||
writeConceptAndHierarchyFiles();
|
||||
|
||||
when(myTerminologySvc.applyDeltaCodesystemsAdd(eq("http://foo"), any(), any())).thenReturn(new AtomicInteger(100));
|
||||
when(myTermLoaderSvc.loadDeltaAdd(eq("http://foo"), anyList(), any())).thenReturn(new UploadStatistics(100, new IdType("CodeSystem/101")));
|
||||
|
||||
App.main(new String[]{
|
||||
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
|
||||
|
@ -73,25 +77,69 @@ public class UploadTerminologyCommandTest {
|
|||
"-d", myHierarchyFileName
|
||||
});
|
||||
|
||||
verify(myTerminologySvc, times(1)).applyDeltaCodesystemsAdd(any(), isNull(), myCodeSystemCaptor.capture());
|
||||
verify(myTermLoaderSvc, times(1)).loadDeltaAdd(eq("http://foo"), myDescriptorListCaptor.capture(), any());
|
||||
|
||||
CodeSystem codeSystem = myCodeSystemCaptor.getValue();
|
||||
assertEquals(1, codeSystem.getConcept().size());
|
||||
assertEquals("http://foo", codeSystem.getUrl());
|
||||
assertEquals("ANIMALS", codeSystem.getConcept().get(0).getCode());
|
||||
assertEquals("Animals", codeSystem.getConcept().get(0).getDisplay());
|
||||
assertEquals(2, codeSystem.getConcept().get(0).getConcept().size());
|
||||
assertEquals("CATS", codeSystem.getConcept().get(0).getConcept().get(0).getCode());
|
||||
assertEquals("Cats", codeSystem.getConcept().get(0).getConcept().get(0).getDisplay());
|
||||
assertEquals("DOGS", codeSystem.getConcept().get(0).getConcept().get(1).getCode());
|
||||
assertEquals("Dogs", codeSystem.getConcept().get(0).getConcept().get(1).getDisplay());
|
||||
List<ITermLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorListCaptor.getValue();
|
||||
assertEquals(1, listOfDescriptors.size());
|
||||
assertEquals("file:/files.zip", listOfDescriptors.get(0).getFilename());
|
||||
assertThat(IOUtils.toByteArray(listOfDescriptors.get(0).getInputStream()).length, greaterThan(100));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTerminologyUpload_RemoveDelta() throws IOException {
|
||||
public void testAddDeltaUsingCompressedFile() throws IOException {
|
||||
|
||||
writeConceptAndHierarchyFiles();
|
||||
writeArchiveFile(myConceptsFile, myHierarchyFile);
|
||||
|
||||
when(myTermLoaderSvc.loadDeltaAdd(eq("http://foo"), anyList(), any())).thenReturn(new UploadStatistics(100, new IdType("CodeSystem/101")));
|
||||
|
||||
App.main(new String[]{
|
||||
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
|
||||
"-v", "r4",
|
||||
"-m", "ADD",
|
||||
"-t", "http://localhost:" + myPort,
|
||||
"-u", "http://foo",
|
||||
"-d", myArchiveFileName
|
||||
});
|
||||
|
||||
verify(myTermLoaderSvc, times(1)).loadDeltaAdd(eq("http://foo"), myDescriptorListCaptor.capture(), any());
|
||||
|
||||
List<ITermLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorListCaptor.getValue();
|
||||
assertEquals(1, listOfDescriptors.size());
|
||||
assertThat(listOfDescriptors.get(0).getFilename(), matchesPattern("^file:.*temp.*\\.zip$"));
|
||||
assertThat(IOUtils.toByteArray(listOfDescriptors.get(0).getInputStream()).length, greaterThan(100));
|
||||
}
|
||||
|
||||
private void writeArchiveFile(File... theFiles) throws IOException {
|
||||
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
|
||||
ZipOutputStream zipOutputStream = new ZipOutputStream(byteArrayOutputStream, Charsets.UTF_8);
|
||||
|
||||
for (File next : theFiles) {
|
||||
ZipEntry nextEntry = new ZipEntry(UploadTerminologyCommand.stripPath(next.getAbsolutePath()));
|
||||
zipOutputStream.putNextEntry(nextEntry);
|
||||
|
||||
try (FileInputStream fileInputStream = new FileInputStream(next)) {
|
||||
IOUtils.copy(fileInputStream, zipOutputStream);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
zipOutputStream.flush();
|
||||
zipOutputStream.close();
|
||||
|
||||
myArchiveFile = File.createTempFile("temp", ".zip");
|
||||
myArchiveFile.deleteOnExit();
|
||||
myArchiveFileName = myArchiveFile.getAbsolutePath();
|
||||
try (FileOutputStream fos = new FileOutputStream(myArchiveFile, false)) {
|
||||
fos.write(byteArrayOutputStream.toByteArray());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRemoveDelta() throws IOException {
|
||||
writeConceptAndHierarchyFiles();
|
||||
|
||||
when(myTerminologySvc.applyDeltaCodesystemsRemove(eq("http://foo"), any())).thenReturn(new AtomicInteger(100));
|
||||
when(myTermLoaderSvc.loadDeltaRemove(eq("http://foo"), anyList(), any())).thenReturn(new UploadStatistics(100, new IdType("CodeSystem/101")));
|
||||
|
||||
App.main(new String[]{
|
||||
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
|
||||
|
@ -103,46 +151,38 @@ public class UploadTerminologyCommandTest {
|
|||
"-d", myHierarchyFileName
|
||||
});
|
||||
|
||||
verify(myTerminologySvc, times(1)).applyDeltaCodesystemsRemove(any(), myCodeSystemCaptor.capture());
|
||||
verify(myTermLoaderSvc, times(1)).loadDeltaRemove(eq("http://foo"), myDescriptorListCaptor.capture(), any());
|
||||
|
||||
List<ITermLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorListCaptor.getValue();
|
||||
assertEquals(1, listOfDescriptors.size());
|
||||
assertEquals("file:/files.zip", listOfDescriptors.get(0).getFilename());
|
||||
assertThat(IOUtils.toByteArray(listOfDescriptors.get(0).getInputStream()).length, greaterThan(100));
|
||||
|
||||
CodeSystem codeSystem = myCodeSystemCaptor.getValue();
|
||||
assertEquals(3, codeSystem.getConcept().size());
|
||||
assertEquals("http://foo", codeSystem.getUrl());
|
||||
assertEquals("ANIMALS", codeSystem.getConcept().get(0).getCode());
|
||||
assertEquals("Animals", codeSystem.getConcept().get(0).getDisplay());
|
||||
assertEquals("CATS", codeSystem.getConcept().get(1).getCode());
|
||||
assertEquals("Cats", codeSystem.getConcept().get(1).getDisplay());
|
||||
assertEquals("DOGS", codeSystem.getConcept().get(2).getCode());
|
||||
assertEquals("Dogs", codeSystem.getConcept().get(2).getDisplay());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTerminologyUpload_Snapshot() throws IOException {
|
||||
public void testSnapshot() throws IOException {
|
||||
|
||||
writeConceptAndHierarchyFiles();
|
||||
|
||||
when(myTerminologyLoaderSvc.loadCustom(eq("http://foo"), any(), any())).thenReturn(new IHapiTerminologyLoaderSvc.UploadStatistics(100, new IdType("CodeSystem/123")));
|
||||
when(myTermLoaderSvc.loadCustom(any(), anyList(), any())).thenReturn(new UploadStatistics(100, new IdType("CodeSystem/101")));
|
||||
|
||||
App.main(new String[]{
|
||||
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
|
||||
"-v", "r4",
|
||||
"-m", "SNAPSHOT",
|
||||
"--custom",
|
||||
"-t", "http://localhost:" + myPort,
|
||||
"-u", "http://foo",
|
||||
"-d", myConceptsFileName,
|
||||
"-d", myHierarchyFileName
|
||||
});
|
||||
|
||||
verify(myTerminologyLoaderSvc, times(1)).loadCustom(any(), myDescriptorList.capture(), any());
|
||||
verify(myTermLoaderSvc, times(1)).loadCustom(any(), myDescriptorListCaptor.capture(), any());
|
||||
|
||||
List<IHapiTerminologyLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorList.getValue();
|
||||
assertEquals(2, listOfDescriptors.size());
|
||||
|
||||
assertThat(listOfDescriptors.get(0).getFilename(), Matchers.endsWith("concepts.csv"));
|
||||
assertInputStreamEqualsFile(myConceptsFile, listOfDescriptors.get(0).getInputStream());
|
||||
assertThat(listOfDescriptors.get(1).getFilename(), Matchers.endsWith("hierarchy.csv"));
|
||||
assertInputStreamEqualsFile(myHierarchyFile, listOfDescriptors.get(1).getInputStream());
|
||||
List<ITermLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorListCaptor.getValue();
|
||||
assertEquals(1, listOfDescriptors.size());
|
||||
assertEquals("file:/files.zip", listOfDescriptors.get(0).getFilename());
|
||||
assertThat(IOUtils.toByteArray(listOfDescriptors.get(0).getInputStream()).length, greaterThan(100));
|
||||
}
|
||||
|
||||
|
||||
|
@ -161,27 +201,41 @@ public class UploadTerminologyCommandTest {
|
|||
}
|
||||
}
|
||||
|
||||
private void assertInputStreamEqualsFile(File theExpectedFile, InputStream theActualInputStream) throws IOException {
|
||||
try (FileInputStream fis = new FileInputStream(theExpectedFile)) {
|
||||
byte[] expectedBytes = IOUtils.toByteArray(fis);
|
||||
byte[] actualBytes = IOUtils.toByteArray(theActualInputStream);
|
||||
assertArrayEquals(expectedBytes, actualBytes);
|
||||
@Test
|
||||
public void testAddInvalidFileName() throws IOException {
|
||||
|
||||
writeConceptAndHierarchyFiles();
|
||||
|
||||
try {
|
||||
App.main(new String[]{
|
||||
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
|
||||
"-v", "r4",
|
||||
"-m", "ADD",
|
||||
"-t", "http://localhost:" + myPort,
|
||||
"-u", "http://foo",
|
||||
"-d", myConceptsFileName + "/foo.csv",
|
||||
"-d", myHierarchyFileName
|
||||
});
|
||||
} catch (Error e) {
|
||||
assertThat(e.toString(), Matchers.containsString("FileNotFoundException: target/concepts.csv/foo.csv"));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@After
|
||||
public void after() throws Exception {
|
||||
JettyUtil.closeServer(myServer);
|
||||
|
||||
FileUtils.deleteQuietly(myConceptsFile);
|
||||
FileUtils.deleteQuietly(myHierarchyFile);
|
||||
FileUtils.deleteQuietly(myArchiveFile);
|
||||
}
|
||||
|
||||
@Before
|
||||
public void start() throws Exception {
|
||||
public void before() throws Exception {
|
||||
myServer = new Server(0);
|
||||
|
||||
TerminologyUploaderProvider provider = new TerminologyUploaderProvider(myCtx, myTerminologyLoaderSvc, myTerminologySvc);
|
||||
TerminologyUploaderProvider provider = new TerminologyUploaderProvider(myCtx, myTermLoaderSvc);
|
||||
|
||||
ServletHandler proxyHandler = new ServletHandler();
|
||||
RestfulServer servlet = new RestfulServer(myCtx);
|
||||
|
|
|
@ -51,23 +51,19 @@ public class OkHttpRestfulResponse extends BaseHttpResponse implements IHttpResp
|
|||
this.myResponse = theResponse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void bufferEntitity() throws IOException {
|
||||
bufferEntity();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void bufferEntity() throws IOException {
|
||||
if (myEntityBuffered) {
|
||||
return;
|
||||
}
|
||||
InputStream responseEntity = readEntity();
|
||||
if (responseEntity != null) {
|
||||
myEntityBuffered = true;
|
||||
try {
|
||||
myEntityBytes = IOUtils.toByteArray(responseEntity);
|
||||
} catch (IllegalStateException e) {
|
||||
throw new InternalErrorException(e);
|
||||
try (InputStream responseEntity = readEntity()) {
|
||||
if (responseEntity != null) {
|
||||
myEntityBuffered = true;
|
||||
try {
|
||||
myEntityBytes = IOUtils.toByteArray(responseEntity);
|
||||
} catch (IllegalStateException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@ package ca.uhn.fhir.rest.client.apache;
|
|||
*/
|
||||
import java.io.*;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.*;
|
||||
|
||||
import ca.uhn.fhir.rest.client.impl.BaseHttpResponse;
|
||||
|
@ -53,25 +54,19 @@ public class ApacheHttpResponse extends BaseHttpResponse implements IHttpRespons
|
|||
this.myResponse = theResponse;
|
||||
}
|
||||
|
||||
@Deprecated // override deprecated method
|
||||
@Override
|
||||
public void bufferEntitity() throws IOException {
|
||||
bufferEntity();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void bufferEntity() throws IOException {
|
||||
if (myEntityBuffered) {
|
||||
return;
|
||||
}
|
||||
InputStream respEntity = readEntity();
|
||||
if (respEntity != null) {
|
||||
this.myEntityBuffered = true;
|
||||
try {
|
||||
this.myEntityBytes = IOUtils.toByteArray(respEntity);
|
||||
} catch (IllegalStateException e) {
|
||||
// FIXME resouce leak
|
||||
throw new InternalErrorException(e);
|
||||
try (InputStream respEntity = readEntity()) {
|
||||
if (respEntity != null) {
|
||||
this.myEntityBuffered = true;
|
||||
try {
|
||||
this.myEntityBytes = IOUtils.toByteArray(respEntity);
|
||||
} catch (IllegalStateException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -103,7 +98,7 @@ public class ApacheHttpResponse extends BaseHttpResponse implements IHttpRespons
|
|||
if (Constants.STATUS_HTTP_204_NO_CONTENT != myResponse.getStatusLine().getStatusCode()) {
|
||||
ourLog.debug("Response did not specify a charset, defaulting to utf-8");
|
||||
}
|
||||
charset = Charset.forName("UTF-8");
|
||||
charset = StandardCharsets.UTF_8;
|
||||
}
|
||||
|
||||
return new InputStreamReader(readEntity(), charset);
|
||||
|
@ -115,11 +110,7 @@ public class ApacheHttpResponse extends BaseHttpResponse implements IHttpRespons
|
|||
if (myResponse.getAllHeaders() != null) {
|
||||
for (Header next : myResponse.getAllHeaders()) {
|
||||
String name = next.getName().toLowerCase();
|
||||
List<String> list = headers.get(name);
|
||||
if (list == null) {
|
||||
list = new ArrayList<>();
|
||||
headers.put(name, list);
|
||||
}
|
||||
List<String> list = headers.computeIfAbsent(name, k -> new ArrayList<>());
|
||||
list.add(next.getValue());
|
||||
}
|
||||
|
||||
|
|
|
@ -22,6 +22,7 @@ package ca.uhn.fhir.rest.client.interceptor;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -94,9 +95,7 @@ public class LoggingInterceptor implements IClientInterceptor {
|
|||
if (content != null) {
|
||||
myLog.info("Client request body:\n{}", content);
|
||||
}
|
||||
} catch (IllegalStateException e) {
|
||||
myLog.warn("Failed to replay request contents (during logging attempt, actual FHIR call did not fail)", e);
|
||||
} catch (IOException e) {
|
||||
} catch (IllegalStateException | IOException e) {
|
||||
myLog.warn("Failed to replay request contents (during logging attempt, actual FHIR call did not fail)", e);
|
||||
}
|
||||
}
|
||||
|
@ -147,11 +146,8 @@ public class LoggingInterceptor implements IClientInterceptor {
|
|||
}
|
||||
|
||||
if (myLogResponseBody) {
|
||||
//TODO: Use of a deprecated method should be resolved.
|
||||
theResponse.bufferEntitity();
|
||||
InputStream respEntity = null;
|
||||
try {
|
||||
respEntity = theResponse.readEntity();
|
||||
theResponse.bufferEntity();
|
||||
try (InputStream respEntity = theResponse.readEntity()) {
|
||||
if (respEntity != null) {
|
||||
final byte[] bytes;
|
||||
try {
|
||||
|
@ -159,12 +155,10 @@ public class LoggingInterceptor implements IClientInterceptor {
|
|||
} catch (IllegalStateException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
myLog.info("Client response body:\n{}", new String(bytes, "UTF-8"));
|
||||
myLog.info("Client response body:\n{}", new String(bytes, StandardCharsets.UTF_8));
|
||||
} else {
|
||||
myLog.info("Client response body: (none)");
|
||||
}
|
||||
} finally {
|
||||
IOUtils.closeQuietly(respEntity);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -178,7 +172,9 @@ public class LoggingInterceptor implements IClientInterceptor {
|
|||
Iterator<String> values = theHeaders.get(key).iterator();
|
||||
while(values.hasNext()) {
|
||||
String value = values.next();
|
||||
b.append(key + ": " + value);
|
||||
b.append(key);
|
||||
b.append(": ");
|
||||
b.append(value);
|
||||
if (nameEntries.hasNext() || values.hasNext()) {
|
||||
b.append('\n');
|
||||
}
|
||||
|
|
|
@ -51,11 +51,6 @@ public class JaxRsHttpResponse extends BaseHttpResponse implements IHttpResponse
|
|||
this.myResponse = theResponse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void bufferEntitity() throws IOException {
|
||||
bufferEntity();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void bufferEntity() throws IOException {
|
||||
if(!myBufferedEntity && myResponse.hasEntity()) {
|
||||
|
|
|
@ -46,7 +46,6 @@
|
|||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-csv</artifactId>
|
||||
<version>1.3</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
|
|
@ -0,0 +1,19 @@
|
|||
package ca.uhn.fhir.jpa;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
||||
public class BaseTest {
|
||||
|
||||
protected String loadResource(String theClasspath) throws IOException {
|
||||
InputStream stream = BaseTest.class.getResourceAsStream(theClasspath);
|
||||
if (stream==null) {
|
||||
throw new IllegalArgumentException("Unable to find resource: " + theClasspath);
|
||||
}
|
||||
return IOUtils.toString(stream, Charsets.UTF_8);
|
||||
}
|
||||
|
||||
}
|
|
@ -32,6 +32,13 @@ import ca.uhn.fhir.jpa.subscription.module.cache.LinkedBlockingQueueSubscribable
|
|||
import ca.uhn.fhir.jpa.subscription.module.channel.ISubscribableChannelFactory;
|
||||
import ca.uhn.fhir.jpa.subscription.module.matcher.ISubscriptionMatcher;
|
||||
import ca.uhn.fhir.jpa.subscription.module.matcher.InMemorySubscriptionMatcher;
|
||||
import ca.uhn.fhir.jpa.term.TermCodeSystemStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReindexingSvc;
|
||||
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
|
||||
import org.hibernate.jpa.HibernatePersistenceProvider;
|
||||
import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices;
|
||||
|
@ -256,7 +263,6 @@ public abstract class BaseConfig {
|
|||
}
|
||||
|
||||
|
||||
|
||||
public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theCtx) {
|
||||
theFactory.setJpaDialect(hibernateJpaDialect(theCtx.getLocalizer()));
|
||||
theFactory.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
package ca.uhn.fhir.jpa.config;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.TermCodeSystemStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReindexingSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
@Configuration
|
||||
public abstract class BaseConfigDstu3Plus extends BaseConfig {
|
||||
|
||||
@Bean
|
||||
public ITermCodeSystemStorageSvc termCodeSystemStorageSvc() {
|
||||
return new TermCodeSystemStorageSvcImpl();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ITermDeferredStorageSvc termDeferredStorageSvc() {
|
||||
return new TermDeferredStorageSvcImpl();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ITermReindexingSvc termReindexingSvc() {
|
||||
return new TermReindexingSvcImpl();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public abstract ITermVersionAdapterSvc terminologyVersionAdapterSvc();
|
||||
|
||||
}
|
|
@ -7,8 +7,10 @@ import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
|
|||
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorDstu2;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryDstu2;
|
||||
import ca.uhn.fhir.jpa.term.HapiTerminologySvcDstu2;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.TermReadSvcDstu2;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.jpa.term.TermVersionAdapterSvcDstu2;
|
||||
import ca.uhn.fhir.jpa.util.ResourceCountCache;
|
||||
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
|
||||
import ca.uhn.fhir.validation.IValidatorModule;
|
||||
|
@ -134,8 +136,8 @@ public class BaseDstu2Config extends BaseConfig {
|
|||
}
|
||||
|
||||
@Bean(autowire = Autowire.BY_TYPE)
|
||||
public IHapiTerminologySvc terminologyService() {
|
||||
return new HapiTerminologySvcDstu2();
|
||||
public ITermReadSvc terminologyService() {
|
||||
return new TermReadSvcDstu2();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.config.dstu3;
|
|||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.ParserOptions;
|
||||
import ca.uhn.fhir.jpa.config.BaseConfig;
|
||||
import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
|
||||
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
|
||||
|
@ -12,10 +13,12 @@ import ca.uhn.fhir.jpa.provider.GraphQLProvider;
|
|||
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorDstu3;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryDstu3;
|
||||
import ca.uhn.fhir.jpa.term.HapiTerminologySvcDstu3;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvcDstu3;
|
||||
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermReadSvcDstu3;
|
||||
import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermVersionAdapterSvcDstu3;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvcDstu3;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.jpa.util.ResourceCountCache;
|
||||
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
|
||||
import ca.uhn.fhir.validation.IValidatorModule;
|
||||
|
@ -26,7 +29,6 @@ import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
|
|||
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
|
||||
import org.hl7.fhir.dstu3.model.Bundle;
|
||||
import org.hl7.fhir.r5.utils.IResourceValidator;
|
||||
import org.springframework.beans.factory.annotation.Autowire;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Lazy;
|
||||
|
@ -55,13 +57,19 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
|
|||
|
||||
@Configuration
|
||||
@EnableTransactionManagement
|
||||
public class BaseDstu3Config extends BaseConfig {
|
||||
public class BaseDstu3Config extends BaseConfigDstu3Plus {
|
||||
|
||||
@Override
|
||||
public FhirContext fhirContext() {
|
||||
return fhirContextDstu3();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Override
|
||||
public ITermVersionAdapterSvc terminologyVersionAdapterSvc() {
|
||||
return new TermVersionAdapterSvcDstu3();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Primary
|
||||
public FhirContext fhirContextDstu3() {
|
||||
|
@ -109,10 +117,9 @@ public class BaseDstu3Config extends BaseConfig {
|
|||
return new JpaValidationSupportChainDstu3();
|
||||
}
|
||||
|
||||
@Bean(name = "myJpaValidationSupportDstu3", autowire = Autowire.BY_NAME)
|
||||
@Bean(name = "myJpaValidationSupportDstu3")
|
||||
public ca.uhn.fhir.jpa.dao.dstu3.IJpaValidationSupportDstu3 jpaValidationSupportDstu3() {
|
||||
ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
|
||||
return retVal;
|
||||
return new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
|
||||
}
|
||||
|
||||
@Bean(name = "myResourceCountsCache")
|
||||
|
@ -122,13 +129,12 @@ public class BaseDstu3Config extends BaseConfig {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
@Bean(autowire = Autowire.BY_TYPE)
|
||||
@Bean
|
||||
public IFulltextSearchSvc searchDaoDstu3() {
|
||||
FulltextSearchSvcImpl searchDao = new FulltextSearchSvcImpl();
|
||||
return searchDao;
|
||||
return new FulltextSearchSvcImpl();
|
||||
}
|
||||
|
||||
@Bean(autowire = Autowire.BY_TYPE)
|
||||
@Bean
|
||||
public SearchParamExtractorDstu3 searchParamExtractor() {
|
||||
return new SearchParamExtractorDstu3();
|
||||
}
|
||||
|
@ -138,10 +144,9 @@ public class BaseDstu3Config extends BaseConfig {
|
|||
return new SearchParamRegistryDstu3();
|
||||
}
|
||||
|
||||
@Bean(name = "mySystemDaoDstu3", autowire = Autowire.BY_NAME)
|
||||
@Bean(name = "mySystemDaoDstu3")
|
||||
public IFhirSystemDao<org.hl7.fhir.dstu3.model.Bundle, org.hl7.fhir.dstu3.model.Meta> systemDaoDstu3() {
|
||||
ca.uhn.fhir.jpa.dao.dstu3.FhirSystemDaoDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.FhirSystemDaoDstu3();
|
||||
return retVal;
|
||||
return new ca.uhn.fhir.jpa.dao.dstu3.FhirSystemDaoDstu3();
|
||||
}
|
||||
|
||||
@Bean(name = "mySystemProviderDstu3")
|
||||
|
@ -152,18 +157,18 @@ public class BaseDstu3Config extends BaseConfig {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
@Bean(autowire = Autowire.BY_TYPE)
|
||||
public IHapiTerminologyLoaderSvc terminologyLoaderService() {
|
||||
return new TerminologyLoaderSvcImpl();
|
||||
@Bean
|
||||
public ITermLoaderSvc termLoaderService() {
|
||||
return new TermLoaderSvcImpl();
|
||||
}
|
||||
|
||||
@Bean(autowire = Autowire.BY_TYPE)
|
||||
public IHapiTerminologySvcDstu3 terminologyService() {
|
||||
return new HapiTerminologySvcDstu3();
|
||||
@Bean
|
||||
public ITermReadSvcDstu3 terminologyService() {
|
||||
return new TermReadSvcDstu3();
|
||||
}
|
||||
|
||||
@Primary
|
||||
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainDstu3")
|
||||
@Bean(name = "myJpaValidationSupportChainDstu3")
|
||||
public IValidationSupport validationSupportChainDstu3() {
|
||||
return new CachingValidationSupport(jpaValidationSupportChain());
|
||||
}
|
||||
|
|
|
@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.config.r4;
|
|||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.ParserOptions;
|
||||
import ca.uhn.fhir.jpa.config.BaseConfig;
|
||||
import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
|
||||
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
|
||||
|
@ -12,10 +13,10 @@ import ca.uhn.fhir.jpa.provider.GraphQLProvider;
|
|||
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorR4;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryR4;
|
||||
import ca.uhn.fhir.jpa.term.HapiTerminologySvcR4;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvcR4;
|
||||
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.*;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvcR4;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.jpa.util.ResourceCountCache;
|
||||
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainR4;
|
||||
import ca.uhn.fhir.validation.IValidatorModule;
|
||||
|
@ -55,13 +56,19 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
|
|||
|
||||
@Configuration
|
||||
@EnableTransactionManagement
|
||||
public class BaseR4Config extends BaseConfig {
|
||||
public class BaseR4Config extends BaseConfigDstu3Plus {
|
||||
|
||||
@Override
|
||||
public FhirContext fhirContext() {
|
||||
return fhirContextR4();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Override
|
||||
public ITermVersionAdapterSvc terminologyVersionAdapterSvc() {
|
||||
return new TermVersionAdapterSvcR4();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Primary
|
||||
public FhirContext fhirContextR4() {
|
||||
|
@ -154,13 +161,13 @@ public class BaseR4Config extends BaseConfig {
|
|||
}
|
||||
|
||||
@Bean(autowire = Autowire.BY_TYPE)
|
||||
public IHapiTerminologyLoaderSvc terminologyLoaderService() {
|
||||
return new TerminologyLoaderSvcImpl();
|
||||
public ITermLoaderSvc termLoaderService() {
|
||||
return new TermLoaderSvcImpl();
|
||||
}
|
||||
|
||||
@Bean(autowire = Autowire.BY_TYPE)
|
||||
public IHapiTerminologySvcR4 terminologyService() {
|
||||
return new HapiTerminologySvcR4();
|
||||
public ITermReadSvcR4 terminologyService() {
|
||||
return new TermReadSvcR4();
|
||||
}
|
||||
|
||||
@Primary
|
||||
|
|
|
@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.config.r5;
|
|||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.ParserOptions;
|
||||
import ca.uhn.fhir.jpa.config.BaseConfig;
|
||||
import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
|
||||
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
|
||||
|
@ -12,10 +13,10 @@ import ca.uhn.fhir.jpa.provider.GraphQLProvider;
|
|||
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorR5;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryR5;
|
||||
import ca.uhn.fhir.jpa.term.HapiTerminologySvcR5;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvcR5;
|
||||
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.*;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvcR5;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.jpa.util.ResourceCountCache;
|
||||
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainR5;
|
||||
import ca.uhn.fhir.validation.IValidatorModule;
|
||||
|
@ -55,13 +56,19 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
|
|||
|
||||
@Configuration
|
||||
@EnableTransactionManagement
|
||||
public class BaseR5Config extends BaseConfig {
|
||||
public class BaseR5Config extends BaseConfigDstu3Plus {
|
||||
|
||||
@Override
|
||||
public FhirContext fhirContext() {
|
||||
return fhirContextR5();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Override
|
||||
public ITermVersionAdapterSvc terminologyVersionAdapterSvc() {
|
||||
return new TermVersionAdapterSvcR5();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Primary
|
||||
public FhirContext fhirContextR5() {
|
||||
|
@ -154,13 +161,13 @@ public class BaseR5Config extends BaseConfig {
|
|||
}
|
||||
|
||||
@Bean(autowire = Autowire.BY_TYPE)
|
||||
public IHapiTerminologyLoaderSvc terminologyLoaderService() {
|
||||
return new TerminologyLoaderSvcImpl();
|
||||
public ITermLoaderSvc terminologyLoaderService() {
|
||||
return new TermLoaderSvcImpl();
|
||||
}
|
||||
|
||||
@Bean(autowire = Autowire.BY_TYPE)
|
||||
public IHapiTerminologySvcR5 terminologyService() {
|
||||
return new HapiTerminologySvcR5();
|
||||
public ITermReadSvcR5 terminologyService() {
|
||||
return new TermReadSvcR5();
|
||||
}
|
||||
|
||||
@Primary
|
||||
|
|
|
@ -20,13 +20,13 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
|||
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
|
||||
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
|
||||
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
|
||||
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
|
||||
import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
|
||||
import ca.uhn.fhir.jpa.searchparam.extractor.LogicalReferenceHelper;
|
||||
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.util.AddRemoveCount;
|
||||
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.model.api.IResource;
|
||||
|
@ -53,7 +53,6 @@ import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetai
|
|||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.util.CoverageIgnore;
|
||||
import ca.uhn.fhir.util.MetaUtil;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import ca.uhn.fhir.util.XmlUtil;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Charsets;
|
||||
|
@ -62,8 +61,6 @@ import com.google.common.hash.HashFunction;
|
|||
import com.google.common.hash.Hashing;
|
||||
import org.apache.commons.lang3.NotImplementedException;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hibernate.Session;
|
||||
import org.hibernate.internal.SessionImpl;
|
||||
import org.hl7.fhir.instance.model.api.*;
|
||||
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -81,7 +78,6 @@ import javax.annotation.PostConstruct;
|
|||
import javax.persistence.*;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
import javax.persistence.criteria.Predicate;
|
||||
import javax.persistence.criteria.Root;
|
||||
import javax.xml.stream.events.Characters;
|
||||
import javax.xml.stream.events.XMLEvent;
|
||||
|
@ -139,7 +135,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
|
|||
@Autowired
|
||||
protected ISearchParamRegistry mySerarchParamRegistry;
|
||||
@Autowired
|
||||
protected IHapiTerminologySvc myTerminologySvc;
|
||||
protected ITermReadSvc myTerminologySvc;
|
||||
@Autowired
|
||||
protected IResourceHistoryTableDao myResourceHistoryTableDao;
|
||||
@Autowired
|
||||
|
@ -161,8 +157,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
|
|||
@Autowired
|
||||
private ISearchCacheSvc mySearchCacheSvc;
|
||||
@Autowired
|
||||
private ISearchResultCacheSvc mySearchResultCacheSvc;
|
||||
@Autowired
|
||||
private ISearchParamPresenceSvc mySearchParamPresenceSvc;
|
||||
@Autowired
|
||||
private DaoRegistry myDaoRegistry;
|
||||
|
@ -192,20 +186,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
|
|||
* none was created, returns null.
|
||||
*/
|
||||
protected ForcedId createForcedIdIfNeeded(ResourceTable theEntity, IIdType theId, boolean theCreateForPureNumericIds) {
|
||||
ForcedId retVal = null;
|
||||
if (theId.isEmpty() == false && theId.hasIdPart() && theEntity.getForcedId() == null) {
|
||||
if (!theCreateForPureNumericIds && IdHelperService.isValidPid(theId)) {
|
||||
return null;
|
||||
if (theCreateForPureNumericIds || !IdHelperService.isValidPid(theId)) {
|
||||
retVal = new ForcedId();
|
||||
retVal.setResourceType(theEntity.getResourceType());
|
||||
retVal.setForcedId(theId.getIdPart());
|
||||
retVal.setResource(theEntity);
|
||||
theEntity.setForcedId(retVal);
|
||||
}
|
||||
|
||||
ForcedId fid = new ForcedId();
|
||||
fid.setResourceType(theEntity.getResourceType());
|
||||
fid.setForcedId(theId.getIdPart());
|
||||
fid.setResource(theEntity);
|
||||
theEntity.setForcedId(fid);
|
||||
return fid;
|
||||
}
|
||||
|
||||
return null;
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private void extractTagsHapi(IResource theResource, ResourceTable theEntity, Set<ResourceTag> allDefs) {
|
||||
|
@ -285,39 +277,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
|
|||
|
||||
}
|
||||
|
||||
private void findMatchingTagIds(RequestDetails theRequest, String theResourceName, IIdType theResourceId, Set<Long> tagIds, Class<? extends BaseTag> entityClass) {
|
||||
{
|
||||
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
|
||||
CriteriaQuery<Tuple> cq = builder.createTupleQuery();
|
||||
Root<? extends BaseTag> from = cq.from(entityClass);
|
||||
cq.multiselect(from.get("myTagId").as(Long.class)).distinct(true);
|
||||
|
||||
if (theResourceName != null) {
|
||||
Predicate typePredicate = builder.equal(from.get("myResourceType"), theResourceName);
|
||||
if (theResourceId != null) {
|
||||
cq.where(typePredicate, builder.equal(from.get("myResourceId"), myIdHelperService.translateForcedIdToPid(theResourceName, theResourceId.getIdPart(), theRequest)));
|
||||
} else {
|
||||
cq.where(typePredicate);
|
||||
}
|
||||
}
|
||||
|
||||
TypedQuery<Tuple> query = myEntityManager.createQuery(cq);
|
||||
for (Tuple next : query.getResultList()) {
|
||||
tagIds.add(next.get(0, Long.class));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void flushJpaSession() {
|
||||
SessionImpl session = (SessionImpl) myEntityManager.unwrap(Session.class);
|
||||
int insertionCount = session.getActionQueue().numberOfInsertions();
|
||||
int updateCount = session.getActionQueue().numberOfUpdates();
|
||||
|
||||
StopWatch sw = new StopWatch();
|
||||
myEntityManager.flush();
|
||||
ourLog.debug("Session flush took {}ms for {} inserts and {} updates", sw.getMillis(), insertionCount, updateCount);
|
||||
}
|
||||
|
||||
private Set<ResourceTag> getAllTagDefinitions(ResourceTable theEntity) {
|
||||
HashSet<ResourceTag> retVal = Sets.newHashSet();
|
||||
if (theEntity.isHasTags()) {
|
||||
|
@ -358,7 +317,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
|
|||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
|
||||
return myDaoRegistry.getResourceDaoOrNull(theType);
|
||||
}
|
||||
|
@ -446,6 +404,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
|
|||
newVersion = Long.toString(newVersionLong);
|
||||
}
|
||||
|
||||
assert theResourceId != null;
|
||||
IIdType newId = theResourceId.withVersion(newVersion);
|
||||
theResource.getIdElement().setValue(newId.getValue());
|
||||
theSavedEntity.setVersion(newVersionLong);
|
||||
|
|
|
@ -39,7 +39,7 @@ import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
|||
import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.term.VersionIndependentConcept;
|
||||
import ca.uhn.fhir.jpa.util.*;
|
||||
import ca.uhn.fhir.model.api.*;
|
||||
|
@ -64,7 +64,6 @@ import ca.uhn.fhir.util.UrlUtil;
|
|||
import com.google.common.collect.Lists;
|
||||
import com.google.common.collect.Maps;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.commons.lang3.ObjectUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.apache.commons.lang3.builder.EqualsBuilder;
|
||||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||
|
@ -135,7 +134,7 @@ public class SearchBuilder implements ISearchBuilder {
|
|||
@Autowired
|
||||
private ISearchParamRegistry mySearchParamRegistry;
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myTerminologySvc;
|
||||
private ITermReadSvc myTerminologySvc;
|
||||
@Autowired
|
||||
private MatchUrlService myMatchUrlService;
|
||||
private List<Long> myAlsoIncludePids;
|
||||
|
|
|
@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
|
|||
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.util.LogicUtil;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
|
@ -41,6 +42,7 @@ import org.hl7.fhir.instance.model.api.IIdType;
|
|||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
@ -54,21 +56,22 @@ public class FhirResourceDaoCodeSystemDstu3 extends FhirResourceDaoDstu3<CodeSys
|
|||
|
||||
@Autowired
|
||||
private ITermCodeSystemDao myCsDao;
|
||||
|
||||
@Autowired
|
||||
private ValidationSupportChain myValidationSupport;
|
||||
@Autowired
|
||||
protected ITermCodeSystemStorageSvc myTerminologyCodeSystemStorageSvc;
|
||||
|
||||
@Override
|
||||
public List<IIdType> findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem, RequestDetails theRequest) {
|
||||
List<IIdType> valueSetIds;
|
||||
Set<Long> ids = searchForIds(new SearchParameterMap(CodeSystem.SP_CODE, new TokenParam(theSystem, theCode)), theRequest );
|
||||
valueSetIds = new ArrayList<>();
|
||||
List<IIdType> valueSetIds = new ArrayList<>();
|
||||
for (Long next : ids) {
|
||||
valueSetIds.add(new IdType("CodeSystem", next));
|
||||
}
|
||||
return valueSetIds;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public IContextValidationSupport.LookupCodeResult lookupCode(IPrimitiveType<String> theCode, IPrimitiveType<String> theSystem, Coding theCoding, RequestDetails theRequestDetails) {
|
||||
boolean haveCoding = theCoding != null && isNotBlank(theCoding.getSystem()) && isNotBlank(theCoding.getCode());
|
||||
|
@ -119,7 +122,7 @@ public class FhirResourceDaoCodeSystemDstu3 extends FhirResourceDaoDstu3<CodeSys
|
|||
if (isNotBlank(codeSystemUrl)) {
|
||||
TermCodeSystem persCs = myCsDao.findByCodeSystemUri(codeSystemUrl);
|
||||
if (persCs != null) {
|
||||
myTerminologySvc.deleteCodeSystem(persCs);
|
||||
myTerminologyCodeSystemStorageSvc.deleteCodeSystem(persCs);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -134,7 +137,7 @@ public class FhirResourceDaoCodeSystemDstu3 extends FhirResourceDaoDstu3<CodeSys
|
|||
org.hl7.fhir.r4.model.CodeSystem cs = VersionConvertor_30_40.convertCodeSystem(csDstu3);
|
||||
addPidToResource(theEntity, cs);
|
||||
|
||||
myTerminologySvc.storeNewCodeSystemVersionIfNeeded(cs, theEntity);
|
||||
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(cs, theEntity);
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
|
|
@ -27,7 +27,7 @@ import ca.uhn.fhir.jpa.term.TranslationResult;
|
|||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import org.hl7.fhir.convertors.VersionConvertor_30_40;
|
||||
|
@ -42,11 +42,9 @@ import java.util.HashSet;
|
|||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
public class FhirResourceDaoConceptMapDstu3 extends FhirResourceDaoDstu3<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myHapiTerminologySvc;
|
||||
private ITermReadSvc myHapiTerminologySvc;
|
||||
|
||||
@Override
|
||||
public TranslationResult translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
|
||||
|
|
|
@ -24,7 +24,7 @@ import ca.uhn.fhir.context.support.IContextValidationSupport;
|
|||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.util.LogicUtil;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
|
@ -62,7 +62,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
|
|||
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoValueSetDstu3.class);
|
||||
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myHapiTerminologySvc;
|
||||
private ITermReadSvc myHapiTerminologySvc;
|
||||
|
||||
@Autowired
|
||||
private DefaultProfileValidationSupport myDefaultProfileValidationSupport;
|
||||
|
|
|
@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
|
|||
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.util.LogicUtil;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
|
@ -56,6 +57,8 @@ public class FhirResourceDaoCodeSystemR4 extends FhirResourceDaoR4<CodeSystem> i
|
|||
private ITermCodeSystemDao myCsDao;
|
||||
@Autowired
|
||||
private ValidationSupportChain myValidationSupport;
|
||||
@Autowired
|
||||
protected ITermCodeSystemStorageSvc myTerminologyCodeSystemStorageSvc;
|
||||
|
||||
@Override
|
||||
public List<IIdType> findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem, RequestDetails theRequest) {
|
||||
|
@ -122,7 +125,7 @@ public class FhirResourceDaoCodeSystemR4 extends FhirResourceDaoR4<CodeSystem> i
|
|||
if (isNotBlank(codeSystemUrl)) {
|
||||
TermCodeSystem persCs = myCsDao.findByCodeSystemUri(codeSystemUrl);
|
||||
if (persCs != null) {
|
||||
myTerminologySvc.deleteCodeSystem(persCs);
|
||||
myTerminologyCodeSystemStorageSvc.deleteCodeSystem(persCs);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -135,7 +138,7 @@ public class FhirResourceDaoCodeSystemR4 extends FhirResourceDaoR4<CodeSystem> i
|
|||
CodeSystem cs = (CodeSystem) theResource;
|
||||
addPidToResource(theEntity, theResource);
|
||||
|
||||
myTerminologySvc.storeNewCodeSystemVersionIfNeeded(cs, theEntity);
|
||||
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(cs, theEntity);
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
|
|
@ -24,7 +24,7 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDaoConceptMap;
|
|||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.term.TranslationMatch;
|
||||
import ca.uhn.fhir.jpa.term.TranslationRequest;
|
||||
import ca.uhn.fhir.jpa.term.TranslationResult;
|
||||
|
@ -38,11 +38,9 @@ import java.util.HashSet;
|
|||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
public class FhirResourceDaoConceptMapR4 extends FhirResourceDaoR4<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myHapiTerminologySvc;
|
||||
private ITermReadSvc myHapiTerminologySvc;
|
||||
|
||||
@Override
|
||||
public TranslationResult translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
|
||||
|
|
|
@ -24,7 +24,7 @@ import ca.uhn.fhir.context.support.IContextValidationSupport;
|
|||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.util.LogicUtil;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
|
@ -56,7 +56,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> implements IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> {
|
||||
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myHapiTerminologySvc;
|
||||
private ITermReadSvc myHapiTerminologySvc;
|
||||
|
||||
@Autowired
|
||||
private DefaultProfileValidationSupport myDefaultProfileValidationSupport;
|
||||
|
|
|
@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
|
|||
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.util.LogicUtil;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
|
@ -56,6 +57,8 @@ public class FhirResourceDaoCodeSystemR5 extends FhirResourceDaoR5<CodeSystem> i
|
|||
private ITermCodeSystemDao myCsDao;
|
||||
@Autowired
|
||||
private ValidationSupportChain myValidationSupport;
|
||||
@Autowired
|
||||
protected ITermCodeSystemStorageSvc myTerminologyCodeSystemStorageSvc;
|
||||
|
||||
@Override
|
||||
public List<IIdType> findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem, RequestDetails theRequest) {
|
||||
|
@ -122,7 +125,7 @@ public class FhirResourceDaoCodeSystemR5 extends FhirResourceDaoR5<CodeSystem> i
|
|||
if (isNotBlank(codeSystemUrl)) {
|
||||
TermCodeSystem persCs = myCsDao.findByCodeSystemUri(codeSystemUrl);
|
||||
if (persCs != null) {
|
||||
myTerminologySvc.deleteCodeSystem(persCs);
|
||||
myTerminologyCodeSystemStorageSvc.deleteCodeSystem(persCs);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -135,7 +138,7 @@ public class FhirResourceDaoCodeSystemR5 extends FhirResourceDaoR5<CodeSystem> i
|
|||
CodeSystem cs = (CodeSystem) theResource;
|
||||
addPidToResource(theEntity, theResource);
|
||||
|
||||
myTerminologySvc.storeNewCodeSystemVersionIfNeeded(org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(cs), theEntity);
|
||||
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(cs), theEntity);
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
|
|
@ -24,7 +24,7 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDaoConceptMap;
|
|||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.term.TranslationMatch;
|
||||
import ca.uhn.fhir.jpa.term.TranslationRequest;
|
||||
import ca.uhn.fhir.jpa.term.TranslationResult;
|
||||
|
@ -41,7 +41,7 @@ import java.util.Set;
|
|||
|
||||
public class FhirResourceDaoConceptMapR5 extends FhirResourceDaoR5<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myHapiTerminologySvc;
|
||||
private ITermReadSvc myHapiTerminologySvc;
|
||||
|
||||
@Override
|
||||
public TranslationResult translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
|
||||
|
|
|
@ -24,7 +24,7 @@ import ca.uhn.fhir.context.support.IContextValidationSupport;
|
|||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.util.LogicUtil;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
|
@ -56,7 +56,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5<ValueSet> implements IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> {
|
||||
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myHapiTerminologySvc;
|
||||
private ITermReadSvc myHapiTerminologySvc;
|
||||
|
||||
@Autowired
|
||||
private DefaultProfileValidationSupport myDefaultProfileValidationSupport;
|
||||
|
|
|
@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.entity;
|
|||
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import org.apache.commons.lang3.builder.EqualsBuilder;
|
||||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
|
||||
|
@ -39,17 +41,17 @@ import static org.apache.commons.lang3.StringUtils.length;
|
|||
@Entity()
|
||||
//@formatter:on
|
||||
public class TermCodeSystem implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
private static final int MAX_NAME_LENGTH = 200;
|
||||
public static final int MAX_URL_LENGTH = 200;
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
private static final int MAX_NAME_LENGTH = 200;
|
||||
@Column(name = "CODE_SYSTEM_URI", nullable = false, length = MAX_URL_LENGTH)
|
||||
private String myCodeSystemUri;
|
||||
|
||||
@OneToOne()
|
||||
@JoinColumn(name = "CURRENT_VERSION_PID", referencedColumnName = "PID", nullable = true, foreignKey = @ForeignKey(name = "FK_TRMCODESYSTEM_CURVER"))
|
||||
private TermCodeSystemVersion myCurrentVersion;
|
||||
@Column(name = "CURRENT_VERSION_PID", nullable = true, insertable = false, updatable = false)
|
||||
private Long myCurrentVersionPid;
|
||||
@Id()
|
||||
@SequenceGenerator(name = "SEQ_CODESYSTEM_PID", sequenceName = "SEQ_CODESYSTEM_PID")
|
||||
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CODESYSTEM_PID")
|
||||
|
@ -70,12 +72,32 @@ public class TermCodeSystem implements Serializable {
|
|||
super();
|
||||
}
|
||||
|
||||
public String getCodeSystemUri() {
|
||||
return myCodeSystemUri;
|
||||
@Override
|
||||
public boolean equals(Object theO) {
|
||||
if (this == theO) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (theO == null || getClass() != theO.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
TermCodeSystem that = (TermCodeSystem) theO;
|
||||
|
||||
EqualsBuilder b = new EqualsBuilder();
|
||||
b.append(myCodeSystemUri, that.myCodeSystemUri);
|
||||
return b.isEquals();
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return myName;
|
||||
@Override
|
||||
public int hashCode() {
|
||||
HashCodeBuilder b = new HashCodeBuilder(17, 37);
|
||||
b.append(myCodeSystemUri);
|
||||
return b.toHashCode();
|
||||
}
|
||||
|
||||
public String getCodeSystemUri() {
|
||||
return myCodeSystemUri;
|
||||
}
|
||||
|
||||
public TermCodeSystem setCodeSystemUri(@Nonnull String theCodeSystemUri) {
|
||||
|
@ -86,6 +108,15 @@ public class TermCodeSystem implements Serializable {
|
|||
return this;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return myName;
|
||||
}
|
||||
|
||||
public TermCodeSystem setName(String theName) {
|
||||
myName = left(theName, MAX_NAME_LENGTH);
|
||||
return this;
|
||||
}
|
||||
|
||||
public TermCodeSystemVersion getCurrentVersion() {
|
||||
return myCurrentVersion;
|
||||
}
|
||||
|
@ -103,11 +134,6 @@ public class TermCodeSystem implements Serializable {
|
|||
return myResource;
|
||||
}
|
||||
|
||||
public TermCodeSystem setName(String theName) {
|
||||
myName = left(theName, MAX_NAME_LENGTH);
|
||||
return this;
|
||||
}
|
||||
|
||||
public TermCodeSystem setResource(ResourceTable theResource) {
|
||||
myResource = theResource;
|
||||
return this;
|
||||
|
@ -115,12 +141,13 @@ public class TermCodeSystem implements Serializable {
|
|||
|
||||
@Override
|
||||
public String toString() {
|
||||
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
|
||||
.append("codeSystemUri", myCodeSystemUri)
|
||||
.append("currentVersion", myCurrentVersion)
|
||||
.append("pid", myPid)
|
||||
.append("resourcePid", myResourcePid)
|
||||
.append("name", myName)
|
||||
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
|
||||
b.append("pid", myPid);
|
||||
b.append("codeSystemUri", myCodeSystemUri);
|
||||
b.append("currentVersionPid", myCurrentVersionPid);
|
||||
b.append("resourcePid", myResourcePid);
|
||||
b.append("name", myName);
|
||||
return b
|
||||
.toString();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,8 +21,11 @@ package ca.uhn.fhir.jpa.entity;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.util.CoverageIgnore;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import org.apache.commons.lang3.builder.EqualsBuilder;
|
||||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
|
||||
import javax.persistence.*;
|
||||
import java.io.Serializable;
|
||||
|
@ -36,10 +39,8 @@ import static org.apache.commons.lang3.StringUtils.length;
|
|||
)
|
||||
@Entity()
|
||||
public class TermCodeSystemVersion implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public static final int MAX_VERSION_LENGTH = 200;
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
@OneToMany(fetch = FetchType.LAZY, mappedBy = "myCodeSystem")
|
||||
private Collection<TermConcept> myConcepts;
|
||||
|
||||
|
@ -84,34 +85,6 @@ public class TermCodeSystemVersion implements Serializable {
|
|||
super();
|
||||
}
|
||||
|
||||
@CoverageIgnore
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (!(obj instanceof TermCodeSystemVersion)) {
|
||||
return false;
|
||||
}
|
||||
TermCodeSystemVersion other = (TermCodeSystemVersion) obj;
|
||||
if ((myResource.getId() == null) != (other.myResource.getId() == null)) {
|
||||
return false;
|
||||
} else if (!myResource.getId().equals(other.myResource.getId())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (myCodeSystemVersionId == null) {
|
||||
if (other.myCodeSystemVersionId != null) {
|
||||
return false;
|
||||
}
|
||||
} else if (!myCodeSystemVersionId.equals(other.myCodeSystemVersionId)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public TermCodeSystem getCodeSystem() {
|
||||
return myCodeSystem;
|
||||
|
@ -154,13 +127,30 @@ public class TermCodeSystemVersion implements Serializable {
|
|||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object theO) {
|
||||
if (this == theO) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (theO == null || getClass() != theO.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
TermCodeSystemVersion that = (TermCodeSystemVersion) theO;
|
||||
|
||||
return new EqualsBuilder()
|
||||
.append(myCodeSystemVersionId, that.myCodeSystemVersionId)
|
||||
.append(myCodeSystemPid, that.myCodeSystemPid)
|
||||
.isEquals();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((myResource.getId() == null) ? 0 : myResource.getId().hashCode());
|
||||
result = prime * result + ((myCodeSystemVersionId == null) ? 0 : myCodeSystemVersionId.hashCode());
|
||||
return result;
|
||||
HashCodeBuilder b = new HashCodeBuilder(17, 37);
|
||||
b.append(myCodeSystemVersionId);
|
||||
b.append(myCodeSystemPid);
|
||||
return b.toHashCode();
|
||||
}
|
||||
|
||||
public String getCodeSystemDisplayName() {
|
||||
|
@ -180,4 +170,19 @@ public class TermCodeSystemVersion implements Serializable {
|
|||
getConcepts().add(concept);
|
||||
return concept;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
|
||||
b.append("pid", myId);
|
||||
b.append("codeSystemResourcePid", myResourcePid);
|
||||
b.append("codeSystemPid", myCodeSystemPid);
|
||||
b.append("codeSystemVersionId", myCodeSystemVersionId);
|
||||
return b.toString();
|
||||
}
|
||||
|
||||
TermCodeSystemVersion setCodeSystemPidForUnitTest(long theCodeSystemPid) {
|
||||
myCodeSystemPid = theCodeSystemPid;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -37,6 +37,7 @@ import javax.persistence.Index;
|
|||
import javax.persistence.*;
|
||||
import java.io.Serializable;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.left;
|
||||
import static org.apache.commons.lang3.StringUtils.length;
|
||||
|
@ -108,6 +109,13 @@ public class TermConcept implements Serializable {
|
|||
setCode(theCode);
|
||||
}
|
||||
|
||||
public TermConcept addChild(RelationshipTypeEnum theRelationshipType) {
|
||||
TermConcept child = new TermConcept();
|
||||
child.setCodeSystemVersion(myCodeSystem);
|
||||
addChild(child, theRelationshipType);
|
||||
return child;
|
||||
}
|
||||
|
||||
public TermConceptParentChildLink addChild(TermConcept theChild, RelationshipTypeEnum theRelationshipType) {
|
||||
Validate.notNull(theRelationshipType, "theRelationshipType must not be null");
|
||||
TermConceptParentChildLink link = new TermConceptParentChildLink();
|
||||
|
@ -200,7 +208,7 @@ public class TermConcept implements Serializable {
|
|||
|
||||
public TermConcept setCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
|
||||
myCodeSystem = theCodeSystemVersion;
|
||||
if (theCodeSystemVersion.getPid() != null) {
|
||||
if (theCodeSystemVersion != null && theCodeSystemVersion.getPid() != null) {
|
||||
myCodeSystemVersionPid = theCodeSystemVersion.getPid();
|
||||
}
|
||||
return this;
|
||||
|
@ -365,10 +373,13 @@ public class TermConcept implements Serializable {
|
|||
|
||||
@Override
|
||||
public String toString() {
|
||||
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
|
||||
.append("code", myCode)
|
||||
.append("display", myDisplay)
|
||||
.build();
|
||||
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
|
||||
b.append("code", myCode);
|
||||
b.append("display", myDisplay);
|
||||
if (mySequence != null) {
|
||||
b.append("sequence", mySequence);
|
||||
}
|
||||
return b.build();
|
||||
}
|
||||
|
||||
public List<IContextValidationSupport.BaseConceptProperty> toValidationProperties() {
|
||||
|
@ -387,4 +398,13 @@ public class TermConcept implements Serializable {
|
|||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a view of {@link #getChildren()} but containing the actual child codes
|
||||
*/
|
||||
public List<TermConcept> getChildCodes() {
|
||||
return getChildren().stream().map(t -> t.getChild()).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -20,11 +20,16 @@ package ca.uhn.fhir.jpa.entity;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import org.hibernate.search.annotations.Field;
|
||||
import org.hibernate.search.annotations.Fields;
|
||||
|
||||
import javax.persistence.*;
|
||||
import java.io.Serializable;
|
||||
|
||||
@Entity
|
||||
@Table(name = "TRM_CONCEPT_PC_LINK")
|
||||
@Table(name = "TRM_CONCEPT_PC_LINK", indexes = {
|
||||
@Index(name = "IDX_TRMCONCPCLNK_CSV", columnList = "CODESYSTEM_PID")
|
||||
})
|
||||
public class TermConceptParentChildLink implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
|
@ -39,6 +44,10 @@ public class TermConceptParentChildLink implements Serializable {
|
|||
@JoinColumn(name = "CODESYSTEM_PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_CS"))
|
||||
private TermCodeSystemVersion myCodeSystem;
|
||||
|
||||
@Column(name = "CODESYSTEM_PID", insertable = false, updatable = false, nullable = false)
|
||||
@Fields({@Field(name = "myCodeSystemVersionPid")})
|
||||
private long myCodeSystemVersionPid;
|
||||
|
||||
@ManyToOne(cascade = {})
|
||||
@JoinColumn(name = "PARENT_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_PARENT"))
|
||||
private TermConcept myParent;
|
||||
|
|
|
@ -22,9 +22,8 @@ package ca.uhn.fhir.jpa.provider;
|
|||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc.UploadStatistics;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.UploadStatistics;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.rest.annotation.Operation;
|
||||
import ca.uhn.fhir.rest.annotation.OperationParam;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
|
@ -34,140 +33,46 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
|||
import ca.uhn.fhir.util.AttachmentUtil;
|
||||
import ca.uhn.fhir.util.ParametersUtil;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import org.hl7.fhir.convertors.VersionConvertor_30_40;
|
||||
import org.hl7.fhir.instance.model.api.IBaseParameters;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.ICompositeType;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import java.io.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.*;
|
||||
|
||||
public class TerminologyUploaderProvider extends BaseJpaProvider {
|
||||
|
||||
public static final String CONCEPT_COUNT = "conceptCount";
|
||||
public static final String TARGET = "target";
|
||||
public static final String PARENT_CODE = "parentCode";
|
||||
public static final String VALUE = "value";
|
||||
public static final String PARAM_FILE = "file";
|
||||
public static final String PARAM_SYSTEM = "system";
|
||||
private static final String RESP_PARAM_CONCEPT_COUNT = "conceptCount";
|
||||
private static final String RESP_PARAM_TARGET = "target";
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProvider.class);
|
||||
private static final String PACKAGE = "package";
|
||||
private static final String RESP_PARAM_SUCCESS = "success";
|
||||
|
||||
@Autowired
|
||||
private FhirContext myCtx;
|
||||
@Autowired
|
||||
private IHapiTerminologyLoaderSvc myTerminologyLoaderSvc;
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myTerminologySvc;
|
||||
private ITermLoaderSvc myTerminologyLoaderSvc;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
public TerminologyUploaderProvider() {
|
||||
this(null, null, null);
|
||||
this(null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
public TerminologyUploaderProvider(FhirContext theContext, IHapiTerminologyLoaderSvc theTerminologyLoaderSvc, IHapiTerminologySvc theTerminologySvc) {
|
||||
public TerminologyUploaderProvider(FhirContext theContext, ITermLoaderSvc theTerminologyLoaderSvc) {
|
||||
myCtx = theContext;
|
||||
myTerminologyLoaderSvc = theTerminologyLoaderSvc;
|
||||
myTerminologySvc = theTerminologySvc;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* <code>
|
||||
* $apply-codesystem-delta-add
|
||||
* </code>
|
||||
*/
|
||||
@Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, idempotent = false, returnParameters = {
|
||||
})
|
||||
public IBaseParameters applyCodeSystemDeltaAdd(
|
||||
HttpServletRequest theServletRequest,
|
||||
@OperationParam(name = PARENT_CODE, min = 0, max = 1) IPrimitiveType<String> theParentCode,
|
||||
@OperationParam(name = VALUE, min = 0, max = 1) IBaseResource theValue,
|
||||
RequestDetails theRequestDetails
|
||||
) {
|
||||
|
||||
startRequest(theServletRequest);
|
||||
try {
|
||||
|
||||
CodeSystem value;
|
||||
if (theValue instanceof CodeSystem) {
|
||||
value = (CodeSystem) theValue;
|
||||
} else if (theValue instanceof org.hl7.fhir.dstu3.model.CodeSystem) {
|
||||
value = VersionConvertor_30_40.convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theValue);
|
||||
} else if (theValue instanceof org.hl7.fhir.r5.model.CodeSystem) {
|
||||
value = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theValue);
|
||||
} else {
|
||||
throw new InvalidRequestException("Value must be present and be a CodeSystem");
|
||||
}
|
||||
|
||||
String system = value.getUrl();
|
||||
String parentCode = theParentCode != null ? theParentCode.getValue() : null;
|
||||
|
||||
AtomicInteger counter = myTerminologySvc.applyDeltaCodesystemsAdd(system, parentCode, value);
|
||||
|
||||
IBaseParameters retVal = ParametersUtil.newInstance(myCtx);
|
||||
ParametersUtil.addParameterToParametersBoolean(myCtx, retVal, "success", true);
|
||||
ParametersUtil.addParameterToParametersInteger(myCtx, retVal, "addedConcepts", counter.get());
|
||||
return retVal;
|
||||
|
||||
} finally {
|
||||
endRequest(theServletRequest);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* <code>
|
||||
* $apply-codesystem-delta-remove
|
||||
* </code>
|
||||
*/
|
||||
@Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, idempotent = false, returnParameters = {
|
||||
})
|
||||
public IBaseParameters applyCodeSystemDeltaRemove(
|
||||
HttpServletRequest theServletRequest,
|
||||
@OperationParam(name = VALUE, min = 1, max = 1) IBaseResource theValue,
|
||||
RequestDetails theRequestDetails
|
||||
) {
|
||||
|
||||
startRequest(theServletRequest);
|
||||
try {
|
||||
|
||||
CodeSystem value;
|
||||
if (theValue instanceof CodeSystem) {
|
||||
value = (CodeSystem) theValue;
|
||||
} else if (theValue instanceof org.hl7.fhir.dstu3.model.CodeSystem) {
|
||||
value = VersionConvertor_30_40.convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theValue);
|
||||
} else if (theValue instanceof org.hl7.fhir.r5.model.CodeSystem) {
|
||||
value = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theValue);
|
||||
} else {
|
||||
throw new InvalidRequestException("Value must be present and be a CodeSystem");
|
||||
}
|
||||
|
||||
String system = value.getUrl();
|
||||
|
||||
AtomicInteger counter = myTerminologySvc.applyDeltaCodesystemsRemove(system, value);
|
||||
|
||||
IBaseParameters retVal = ParametersUtil.newInstance(myCtx);
|
||||
ParametersUtil.addParameterToParametersBoolean(myCtx, retVal, "success", true);
|
||||
ParametersUtil.addParameterToParametersInteger(myCtx, retVal, "removedConcepts", counter.get());
|
||||
return retVal;
|
||||
|
||||
} finally {
|
||||
endRequest(theServletRequest);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -178,28 +83,31 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
|
|||
@Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM, idempotent = false, returnParameters = {
|
||||
// @OperationParam(name = "conceptCount", type = IntegerType.class, min = 1)
|
||||
})
|
||||
public IBaseParameters uploadExternalCodeSystem(
|
||||
public IBaseParameters uploadSnapshot(
|
||||
HttpServletRequest theServletRequest,
|
||||
@OperationParam(name = "url", min = 1, typeName = "uri") IPrimitiveType<String> theCodeSystemUrl,
|
||||
@OperationParam(name = "contentMode", min = 0, typeName = "code") IPrimitiveType<String> theContentMode,
|
||||
@OperationParam(name = PARAM_SYSTEM, min = 1, typeName = "uri") IPrimitiveType<String> theCodeSystemUrl,
|
||||
@OperationParam(name = "localfile", min = 1, max = OperationParam.MAX_UNLIMITED, typeName = "string") List<IPrimitiveType<String>> theLocalFile,
|
||||
@OperationParam(name = PACKAGE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> thePackage,
|
||||
@OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> theFiles,
|
||||
RequestDetails theRequestDetails
|
||||
) {
|
||||
|
||||
startRequest(theServletRequest);
|
||||
|
||||
if (theCodeSystemUrl == null || isBlank(theCodeSystemUrl.getValueAsString())) {
|
||||
throw new InvalidRequestException("Missing mandatory parameter: " + PARAM_SYSTEM);
|
||||
}
|
||||
|
||||
if (theLocalFile == null || theLocalFile.size() == 0) {
|
||||
if (thePackage == null || thePackage.size() == 0) {
|
||||
if (theFiles == null || theFiles.size() == 0) {
|
||||
throw new InvalidRequestException("No 'localfile' or 'package' parameter, or package had no data");
|
||||
}
|
||||
for (ICompositeType next : thePackage) {
|
||||
for (ICompositeType next : theFiles) {
|
||||
ValidateUtil.isTrueOrThrowInvalidRequest(myCtx.getElementDefinition(next.getClass()).getName().equals("Attachment"), "Package must be of type Attachment");
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
List<IHapiTerminologyLoaderSvc.FileDescriptor> localFiles = new ArrayList<>();
|
||||
List<ITermLoaderSvc.FileDescriptor> localFiles = new ArrayList<>();
|
||||
if (theLocalFile != null && theLocalFile.size() > 0) {
|
||||
for (IPrimitiveType<String> nextLocalFile : theLocalFile) {
|
||||
if (isNotBlank(nextLocalFile.getValue())) {
|
||||
|
@ -208,7 +116,7 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
|
|||
if (!nextFile.exists() || !nextFile.isFile()) {
|
||||
throw new InvalidRequestException("Unknown file: " + nextFile.getName());
|
||||
}
|
||||
localFiles.add(new IHapiTerminologyLoaderSvc.FileDescriptor() {
|
||||
localFiles.add(new ITermLoaderSvc.FileDescriptor() {
|
||||
@Override
|
||||
public String getFilename() {
|
||||
return nextFile.getAbsolutePath();
|
||||
|
@ -227,15 +135,15 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
|
|||
}
|
||||
}
|
||||
|
||||
if (thePackage != null) {
|
||||
for (ICompositeType nextPackage : thePackage) {
|
||||
if (theFiles != null) {
|
||||
for (ICompositeType nextPackage : theFiles) {
|
||||
final String url = AttachmentUtil.getOrCreateUrl(myCtx, nextPackage).getValueAsString();
|
||||
|
||||
if (isBlank(url)) {
|
||||
throw new UnprocessableEntityException("Package is missing mandatory url element");
|
||||
}
|
||||
|
||||
localFiles.add(new IHapiTerminologyLoaderSvc.FileDescriptor() {
|
||||
localFiles.add(new ITermLoaderSvc.FileDescriptor() {
|
||||
@Override
|
||||
public String getFilename() {
|
||||
return url;
|
||||
|
@ -250,33 +158,29 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
|
|||
}
|
||||
}
|
||||
|
||||
String codeSystemUrl = theCodeSystemUrl != null ? theCodeSystemUrl.getValue() : null;
|
||||
codeSystemUrl = defaultString(codeSystemUrl);
|
||||
String codeSystemUrl = theCodeSystemUrl.getValue();
|
||||
codeSystemUrl = trim(codeSystemUrl);
|
||||
|
||||
String contentMode = theContentMode != null ? theContentMode.getValue() : null;
|
||||
UploadStatistics stats;
|
||||
if ("custom".equals(contentMode)) {
|
||||
stats = myTerminologyLoaderSvc.loadCustom(codeSystemUrl, localFiles, theRequestDetails);
|
||||
} else {
|
||||
switch (codeSystemUrl) {
|
||||
case IHapiTerminologyLoaderSvc.SCT_URI:
|
||||
stats = myTerminologyLoaderSvc.loadSnomedCt(localFiles, theRequestDetails);
|
||||
break;
|
||||
case IHapiTerminologyLoaderSvc.LOINC_URI:
|
||||
stats = myTerminologyLoaderSvc.loadLoinc(localFiles, theRequestDetails);
|
||||
break;
|
||||
case IHapiTerminologyLoaderSvc.IMGTHLA_URI:
|
||||
stats = myTerminologyLoaderSvc.loadImgthla(localFiles, theRequestDetails);
|
||||
break;
|
||||
default:
|
||||
throw new InvalidRequestException("Unknown URL: " + codeSystemUrl);
|
||||
}
|
||||
switch (codeSystemUrl) {
|
||||
case ITermLoaderSvc.SCT_URI:
|
||||
stats = myTerminologyLoaderSvc.loadSnomedCt(localFiles, theRequestDetails);
|
||||
break;
|
||||
case ITermLoaderSvc.LOINC_URI:
|
||||
stats = myTerminologyLoaderSvc.loadLoinc(localFiles, theRequestDetails);
|
||||
break;
|
||||
case ITermLoaderSvc.IMGTHLA_URI:
|
||||
stats = myTerminologyLoaderSvc.loadImgthla(localFiles, theRequestDetails);
|
||||
break;
|
||||
default:
|
||||
stats = myTerminologyLoaderSvc.loadCustom(codeSystemUrl, localFiles, theRequestDetails);
|
||||
break;
|
||||
}
|
||||
|
||||
IBaseParameters retVal = ParametersUtil.newInstance(myCtx);
|
||||
ParametersUtil.addParameterToParametersBoolean(myCtx, retVal, "success", true);
|
||||
ParametersUtil.addParameterToParametersInteger(myCtx, retVal, CONCEPT_COUNT, stats.getConceptCount());
|
||||
ParametersUtil.addParameterToParametersReference(myCtx, retVal, TARGET, stats.getTarget().getValue());
|
||||
ParametersUtil.addParameterToParametersBoolean(myCtx, retVal, RESP_PARAM_SUCCESS, true);
|
||||
ParametersUtil.addParameterToParametersInteger(myCtx, retVal, RESP_PARAM_CONCEPT_COUNT, stats.getUpdatedConceptCount());
|
||||
ParametersUtil.addParameterToParametersReference(myCtx, retVal, RESP_PARAM_TARGET, stats.getTarget().getValue());
|
||||
|
||||
return retVal;
|
||||
} finally {
|
||||
|
@ -284,5 +188,100 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* <code>
|
||||
* $apply-codesystem-delta-add
|
||||
* </code>
|
||||
*/
|
||||
@Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, idempotent = false, returnParameters = {
|
||||
})
|
||||
public IBaseParameters uploadDeltaAdd(
|
||||
HttpServletRequest theServletRequest,
|
||||
@OperationParam(name = PARAM_SYSTEM, min = 1, max = 1, typeName = "uri") IPrimitiveType<String> theSystem,
|
||||
@OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> theFiles,
|
||||
RequestDetails theRequestDetails
|
||||
) {
|
||||
|
||||
startRequest(theServletRequest);
|
||||
try {
|
||||
validateHaveSystem(theSystem);
|
||||
validateHaveFiles(theFiles);
|
||||
|
||||
List<ITermLoaderSvc.FileDescriptor> files = convertAttachmentsToFileDescriptors(theFiles);
|
||||
UploadStatistics outcome = myTerminologyLoaderSvc.loadDeltaAdd(theSystem.getValue(), files, theRequestDetails);
|
||||
return toDeltaResponse(outcome);
|
||||
} finally {
|
||||
endRequest(theServletRequest);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* <code>
|
||||
* $apply-codesystem-delta-remove
|
||||
* </code>
|
||||
*/
|
||||
@Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, idempotent = false, returnParameters = {
|
||||
})
|
||||
public IBaseParameters uploadDeltaRemove(
|
||||
HttpServletRequest theServletRequest,
|
||||
@OperationParam(name = PARAM_SYSTEM, min = 1, max = 1, typeName = "uri") IPrimitiveType<String> theSystem,
|
||||
@OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> theFiles,
|
||||
RequestDetails theRequestDetails
|
||||
) {
|
||||
|
||||
startRequest(theServletRequest);
|
||||
try {
|
||||
validateHaveSystem(theSystem);
|
||||
validateHaveFiles(theFiles);
|
||||
|
||||
List<ITermLoaderSvc.FileDescriptor> files = convertAttachmentsToFileDescriptors(theFiles);
|
||||
UploadStatistics outcome = myTerminologyLoaderSvc.loadDeltaRemove(theSystem.getValue(), files, theRequestDetails);
|
||||
return toDeltaResponse(outcome);
|
||||
} finally {
|
||||
endRequest(theServletRequest);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void validateHaveSystem(IPrimitiveType<String> theSystem) {
|
||||
if (theSystem == null || isBlank(theSystem.getValueAsString())) {
|
||||
throw new InvalidRequestException("Missing mandatory parameter: " + PARAM_SYSTEM);
|
||||
}
|
||||
}
|
||||
|
||||
private void validateHaveFiles(List<ICompositeType> theFiles) {
|
||||
if (theFiles != null) {
|
||||
for (ICompositeType nextFile : theFiles) {
|
||||
if (!nextFile.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new InvalidRequestException("Missing mandatory parameter: " + PARAM_FILE);
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private List<ITermLoaderSvc.FileDescriptor> convertAttachmentsToFileDescriptors(@OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> theFiles) {
|
||||
List<ITermLoaderSvc.FileDescriptor> files = new ArrayList<>();
|
||||
for (ICompositeType next : theFiles) {
|
||||
byte[] nextData = AttachmentUtil.getOrCreateData(myCtx, next).getValue();
|
||||
String nextUrl = AttachmentUtil.getOrCreateUrl(myCtx, next).getValue();
|
||||
ValidateUtil.isTrueOrThrowInvalidRequest(nextData != null && nextData.length > 0, "Missing Attachment.data value");
|
||||
ValidateUtil.isNotBlankOrThrowUnprocessableEntity(nextUrl, "Missing Attachment.url value");
|
||||
|
||||
files.add(new ITermLoaderSvc.ByteArrayFileDescriptor(nextUrl, nextData));
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
private IBaseParameters toDeltaResponse(UploadStatistics theOutcome) {
|
||||
IBaseParameters retVal = ParametersUtil.newInstance(myCtx);
|
||||
ParametersUtil.addParameterToParametersInteger(myCtx, retVal, RESP_PARAM_CONCEPT_COUNT, theOutcome.getUpdatedConceptCount());
|
||||
ParametersUtil.addParameterToParametersReference(myCtx, retVal, RESP_PARAM_TARGET, theOutcome.getTarget().getValue());
|
||||
return retVal;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -53,6 +53,7 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
|
|||
import ca.uhn.fhir.rest.server.method.PageMethodBinding;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;
|
||||
import ca.uhn.fhir.util.AsyncUtil;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
|
@ -76,6 +77,7 @@ import org.springframework.transaction.annotation.Transactional;
|
|||
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import javax.annotation.PostConstruct;
|
||||
import javax.persistence.EntityManager;
|
||||
|
@ -154,14 +156,16 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
|
|||
public void cancelAllActiveSearches() {
|
||||
for (SearchTask next : myIdToSearchTask.values()) {
|
||||
next.requestImmediateAbort();
|
||||
try {
|
||||
next.getCompletionLatch().await(30, TimeUnit.SECONDS);
|
||||
} catch (InterruptedException e) {
|
||||
ourLog.warn("Failed to wait for completion", e);
|
||||
}
|
||||
AsyncUtil.awaitLatchAndIgnoreInterrupt(next.getCompletionLatch(), 30, TimeUnit.SECONDS);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("SameParameterValue")
|
||||
@VisibleForTesting
|
||||
void setMaxMillisToWaitForRemoteResultsForUnitTest(long theMaxMillisToWaitForRemoteResults) {
|
||||
myMaxMillisToWaitForRemoteResults = theMaxMillisToWaitForRemoteResults;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is called by the HTTP client processing thread in order to
|
||||
* fetch resources.
|
||||
|
@ -189,18 +193,16 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
|
|||
if (searchTask != null) {
|
||||
ourLog.trace("Local search found");
|
||||
List<Long> resourcePids = searchTask.getResourcePids(theFrom, theTo);
|
||||
if (resourcePids != null) {
|
||||
ourLog.trace("Local search returned {} pids, wanted {}-{} - Search: {}", resourcePids.size(), theFrom, theTo, searchTask.getSearch());
|
||||
ourLog.trace("Local search returned {} pids, wanted {}-{} - Search: {}", resourcePids.size(), theFrom, theTo, searchTask.getSearch());
|
||||
|
||||
/*
|
||||
* Generally, if a search task is open, the fastest possible thing is to just return its results. This
|
||||
* will work most of the time, but can fail if the task hit a search threshold and the client is requesting
|
||||
* results beyond that threashold. In that case, we'll keep going below, since that will trigger another
|
||||
* task.
|
||||
*/
|
||||
if ((searchTask.getSearch().getNumFound() - searchTask.getSearch().getNumBlocked()) >= theTo || resourcePids.size() == (theTo - theFrom)) {
|
||||
return resourcePids;
|
||||
}
|
||||
/*
|
||||
* Generally, if a search task is open, the fastest possible thing is to just return its results. This
|
||||
* will work most of the time, but can fail if the task hit a search threshold and the client is requesting
|
||||
* results beyond that threashold. In that case, we'll keep going below, since that will trigger another
|
||||
* task.
|
||||
*/
|
||||
if ((searchTask.getSearch().getNumFound() - searchTask.getSearch().getNumBlocked()) >= theTo || resourcePids.size() == (theTo - theFrom)) {
|
||||
return resourcePids;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -244,11 +246,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
|
|||
}
|
||||
}
|
||||
|
||||
try {
|
||||
Thread.sleep(500);
|
||||
} catch (InterruptedException e) {
|
||||
// ignore
|
||||
}
|
||||
AsyncUtil.sleep(500);
|
||||
}
|
||||
|
||||
ourLog.trace("Finished looping");
|
||||
|
@ -627,14 +625,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
|
|||
Integer awaitInitialSync() {
|
||||
ourLog.trace("Awaiting initial sync");
|
||||
do {
|
||||
try {
|
||||
if (getInitialCollectionLatch().await(250, TimeUnit.MILLISECONDS)) {
|
||||
break;
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
// Shouldn't happen
|
||||
Thread.currentThread().interrupt();
|
||||
throw new InternalErrorException(e);
|
||||
if (AsyncUtil.awaitLatchAndThrowInternalErrorExceptionOnInterrupt(getInitialCollectionLatch(), 250L, TimeUnit.MILLISECONDS)) {
|
||||
break;
|
||||
}
|
||||
} while (getSearch().getStatus() == SearchStatusEnum.LOADING);
|
||||
ourLog.trace("Initial sync completed");
|
||||
|
@ -663,7 +655,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
|
|||
return sb;
|
||||
}
|
||||
|
||||
public List<Long> getResourcePids(int theFromIndex, int theToIndex) {
|
||||
@Nonnull
|
||||
List<Long> getResourcePids(int theFromIndex, int theToIndex) {
|
||||
ourLog.debug("Requesting search PIDs from {}-{}", theFromIndex, theToIndex);
|
||||
|
||||
boolean keepWaiting;
|
||||
|
@ -698,11 +691,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
|
|||
|
||||
if (keepWaiting) {
|
||||
ourLog.info("Waiting as we only have {} results - Search status: {}", mySyncedPids.size(), mySearch.getStatus());
|
||||
try {
|
||||
Thread.sleep(500);
|
||||
} catch (InterruptedException theE) {
|
||||
// ignore
|
||||
}
|
||||
AsyncUtil.sleep(500L);
|
||||
}
|
||||
} while (keepWaiting);
|
||||
|
||||
|
@ -1081,11 +1070,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
|
|||
}
|
||||
|
||||
if (myLoadingThrottleForUnitTests != null) {
|
||||
try {
|
||||
Thread.sleep(myLoadingThrottleForUnitTests);
|
||||
} catch (InterruptedException e) {
|
||||
// ignore
|
||||
}
|
||||
AsyncUtil.sleep(myLoadingThrottleForUnitTests);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -48,17 +48,15 @@ import java.util.List;
|
|||
import java.util.Optional;
|
||||
|
||||
public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(DatabaseSearchCacheSvcImpl.class);
|
||||
|
||||
/*
|
||||
* Be careful increasing this number! We use the number of params here in a
|
||||
* DELETE FROM foo WHERE params IN (aaaa)
|
||||
* DELETE FROM foo WHERE params IN (term,term,term...)
|
||||
* type query and this can fail if we have 1000s of params
|
||||
*/
|
||||
public static final int DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT = 500;
|
||||
public static final int DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS = 20000;
|
||||
public static final long DEFAULT_CUTOFF_SLACK = 10 * DateUtils.MILLIS_PER_SECOND;
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(DatabaseSearchCacheSvcImpl.class);
|
||||
private static int ourMaximumResultsToDeleteInOneStatement = DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT;
|
||||
private static int ourMaximumResultsToDeleteInOnePass = DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS;
|
||||
private static Long ourNowForUnitTests;
|
||||
|
@ -108,6 +106,14 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
|
|||
}
|
||||
|
||||
|
||||
void setSearchDaoForUnitTest(ISearchDao theSearchDao) {
|
||||
mySearchDao = theSearchDao;
|
||||
}
|
||||
|
||||
void setTxManagerForUnitTest(PlatformTransactionManager theTxManager) {
|
||||
myTxManager = theTxManager;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(Transactional.TxType.NEVER)
|
||||
public Optional<Search> tryToMarkSearchAsInProgress(Search theSearch) {
|
||||
|
@ -185,7 +191,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
|
|||
|
||||
int count = toDelete.getContent().size();
|
||||
if (count > 0) {
|
||||
if (ourLog.isDebugEnabled()) {
|
||||
if (ourLog.isDebugEnabled() || "true".equalsIgnoreCase(System.getProperty("test"))) {
|
||||
Long total = tt.execute(t -> mySearchDao.count());
|
||||
ourLog.debug("Deleted {} searches, {} remaining", count, total);
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,14 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
|
||||
public abstract class BaseTermVersionAdapterSvcImpl implements ITermVersionAdapterSvc {
|
||||
|
||||
|
||||
protected void validateCodeSystemForStorage(CodeSystem theCodeSystemResource) {
|
||||
ValidateUtil.isNotBlankOrThrowUnprocessableEntity(theCodeSystemResource.getUrl(), "Can not store a CodeSystem without a valid URL");
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,123 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.io.input.BOMInputStream;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipInputStream;
|
||||
|
||||
public class LoadedFileDescriptors implements Closeable {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(LoadedFileDescriptors.class);
|
||||
private List<File> myTemporaryFiles = new ArrayList<>();
|
||||
private List<ITermLoaderSvc.FileDescriptor> myUncompressedFileDescriptors = new ArrayList<>();
|
||||
|
||||
LoadedFileDescriptors(List<ITermLoaderSvc.FileDescriptor> theFileDescriptors) {
|
||||
try {
|
||||
for (ITermLoaderSvc.FileDescriptor next : theFileDescriptors) {
|
||||
if (next.getFilename().toLowerCase().endsWith(".zip")) {
|
||||
ourLog.info("Uncompressing {} into temporary files", next.getFilename());
|
||||
try (InputStream inputStream = next.getInputStream()) {
|
||||
try (BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream)) {
|
||||
try (ZipInputStream zis = new ZipInputStream(bufferedInputStream)) {
|
||||
for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null; ) {
|
||||
BOMInputStream fis = new BOMInputStream(zis);
|
||||
File nextTemporaryFile = File.createTempFile("hapifhir", ".tmp");
|
||||
ourLog.info("Creating temporary file: {}", nextTemporaryFile.getAbsolutePath());
|
||||
nextTemporaryFile.deleteOnExit();
|
||||
try (FileOutputStream fos = new FileOutputStream(nextTemporaryFile, false)) {
|
||||
IOUtils.copy(fis, fos);
|
||||
String nextEntryFileName = nextEntry.getName();
|
||||
myUncompressedFileDescriptors.add(new ITermLoaderSvc.FileDescriptor() {
|
||||
@Override
|
||||
public String getFilename() {
|
||||
return nextEntryFileName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream getInputStream() {
|
||||
try {
|
||||
return new FileInputStream(nextTemporaryFile);
|
||||
} catch (FileNotFoundException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
}
|
||||
});
|
||||
myTemporaryFiles.add(nextTemporaryFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
myUncompressedFileDescriptors.add(next);
|
||||
}
|
||||
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public boolean hasFile(String theFilename) {
|
||||
return myUncompressedFileDescriptors
|
||||
.stream()
|
||||
.map(t -> t.getFilename().replaceAll(".*[\\\\/]", "")) // Strip the path from the filename
|
||||
.anyMatch(t -> t.equals(theFilename));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
for (File next : myTemporaryFiles) {
|
||||
ourLog.info("Deleting temporary file: {}", next.getAbsolutePath());
|
||||
FileUtils.deleteQuietly(next);
|
||||
}
|
||||
}
|
||||
|
||||
List<ITermLoaderSvc.FileDescriptor> getUncompressedFileDescriptors() {
|
||||
return myUncompressedFileDescriptors;
|
||||
}
|
||||
|
||||
private List<String> notFound(List<String> theExpectedFilenameFragments) {
|
||||
Set<String> foundFragments = new HashSet<>();
|
||||
for (String nextExpected : theExpectedFilenameFragments) {
|
||||
for (ITermLoaderSvc.FileDescriptor next : myUncompressedFileDescriptors) {
|
||||
if (next.getFilename().contains(nextExpected)) {
|
||||
foundFragments.add(nextExpected);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ArrayList<String> notFoundFileNameFragments = new ArrayList<>(theExpectedFilenameFragments);
|
||||
notFoundFileNameFragments.removeAll(foundFragments);
|
||||
return notFoundFileNameFragments;
|
||||
}
|
||||
|
||||
void verifyMandatoryFilesExist(List<String> theExpectedFilenameFragments) {
|
||||
List<String> notFound = notFound(theExpectedFilenameFragments);
|
||||
if (!notFound.isEmpty()) {
|
||||
throw new UnprocessableEntityException("Could not find the following mandatory files in input: " + notFound);
|
||||
}
|
||||
}
|
||||
|
||||
void verifyOptionalFilesExist(List<String> theExpectedFilenameFragments) {
|
||||
List<String> notFound = notFound(theExpectedFilenameFragments);
|
||||
if (!notFound.isEmpty()) {
|
||||
ourLog.warn("Could not find the following optional files: " + notFound);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,757 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
|
||||
import ca.uhn.fhir.jpa.dao.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.dao.data.*;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.entity.*;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
|
||||
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||
import ca.uhn.fhir.util.ObjectUtil;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hibernate.ScrollMode;
|
||||
import org.hibernate.ScrollableResults;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.annotation.Propagation;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
import javax.persistence.PersistenceContextType;
|
||||
import javax.persistence.TypedQuery;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
import javax.persistence.criteria.Predicate;
|
||||
import javax.persistence.criteria.Root;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(TermCodeSystemStorageSvcImpl.class);
|
||||
private static final Object PLACEHOLDER_OBJECT = new Object();
|
||||
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
|
||||
protected EntityManager myEntityManager;
|
||||
@Autowired
|
||||
protected ITermCodeSystemDao myCodeSystemDao;
|
||||
@Autowired
|
||||
protected ITermCodeSystemVersionDao myCodeSystemVersionDao;
|
||||
@Autowired
|
||||
protected ITermConceptDao myConceptDao;
|
||||
@Autowired
|
||||
protected ITermConceptPropertyDao myConceptPropertyDao;
|
||||
@Autowired
|
||||
protected ITermConceptDesignationDao myConceptDesignationDao;
|
||||
@Autowired
|
||||
protected IdHelperService myIdHelperService;
|
||||
@Autowired
|
||||
private PlatformTransactionManager myTransactionManager;
|
||||
@Autowired
|
||||
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;
|
||||
@Autowired
|
||||
private ITermVersionAdapterSvc myTerminologyVersionAdapterSvc;
|
||||
@Autowired
|
||||
private ITermDeferredStorageSvc myDeferredStorageSvc;
|
||||
@Autowired
|
||||
private FhirContext myContext;
|
||||
@Autowired
|
||||
private ITermReadSvc myTerminologySvc;
|
||||
@Autowired
|
||||
private DaoConfig myDaoConfig;
|
||||
@Autowired
|
||||
private IResourceTableDao myResourceTableDao;
|
||||
|
||||
@Override
|
||||
public Long getValueSetResourcePid(IIdType theIdType) {
|
||||
return getValueSetResourcePid(theIdType, null);
|
||||
}
|
||||
|
||||
private Long getValueSetResourcePid(IIdType theIdType, RequestDetails theRequestDetails) {
|
||||
return myIdHelperService.translateForcedIdToPid(theIdType, theRequestDetails);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
@Override
|
||||
public UploadStatistics applyDeltaCodeSystemsAdd(String theSystem, CustomTerminologySet theAdditions) {
|
||||
ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystem, "No system provided");
|
||||
validateDstu3OrNewer();
|
||||
theAdditions.validateNoCycleOrThrowInvalidRequest();
|
||||
|
||||
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
|
||||
if (cs == null) {
|
||||
CodeSystem codeSystemResource = new CodeSystem();
|
||||
codeSystemResource.setUrl(theSystem);
|
||||
codeSystemResource.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
|
||||
myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(codeSystemResource);
|
||||
|
||||
cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
|
||||
}
|
||||
|
||||
TermCodeSystemVersion csv = cs.getCurrentVersion();
|
||||
Validate.notNull(csv);
|
||||
|
||||
CodeSystem codeSystem = myTerminologySvc.getCodeSystemFromContext(theSystem);
|
||||
if (codeSystem.getContent() != CodeSystem.CodeSystemContentMode.NOTPRESENT) {
|
||||
throw new InvalidRequestException("CodeSystem with url[" + theSystem + "] can not apply a delta - wrong content mode: " + codeSystem.getContent());
|
||||
}
|
||||
|
||||
Validate.notNull(cs);
|
||||
Validate.notNull(cs.getPid());
|
||||
|
||||
IIdType codeSystemId = cs.getResource().getIdDt();
|
||||
|
||||
// Load all concepts for the code system
|
||||
Map<String, Long> codeToConceptPid = new HashMap<>();
|
||||
{
|
||||
ourLog.info("Loading all concepts in CodeSystem versionPid[{}] and url[{}]", cs.getPid(), theSystem);
|
||||
StopWatch sw = new StopWatch();
|
||||
CriteriaBuilder criteriaBuilder = myEntityManager.getCriteriaBuilder();
|
||||
CriteriaQuery<TermConcept> query = criteriaBuilder.createQuery(TermConcept.class);
|
||||
Root<TermConcept> root = query.from(TermConcept.class);
|
||||
Predicate predicate = criteriaBuilder.equal(root.get("myCodeSystemVersionPid").as(Long.class), csv.getPid());
|
||||
query.where(predicate);
|
||||
TypedQuery<TermConcept> typedQuery = myEntityManager.createQuery(query.select(root));
|
||||
org.hibernate.query.Query<TermConcept> hibernateQuery = (org.hibernate.query.Query<TermConcept>) typedQuery;
|
||||
ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
|
||||
try (ScrollableResultsIterator<TermConcept> scrollableResultsIterator = new ScrollableResultsIterator<>(scrollableResults)) {
|
||||
while (scrollableResultsIterator.hasNext()) {
|
||||
TermConcept next = scrollableResultsIterator.next();
|
||||
codeToConceptPid.put(next.getCode(), next.getId());
|
||||
}
|
||||
}
|
||||
ourLog.info("Loaded {} concepts in {}", codeToConceptPid.size(), sw.toString());
|
||||
}
|
||||
|
||||
// Load all parent/child links
|
||||
ListMultimap<String, String> parentCodeToChildCodes = ArrayListMultimap.create();
|
||||
ListMultimap<String, String> childCodeToParentCodes = ArrayListMultimap.create();
|
||||
{
|
||||
ourLog.info("Loading all parent/child relationships in CodeSystem url[" + theSystem + "]");
|
||||
int count = 0;
|
||||
StopWatch sw = new StopWatch();
|
||||
CriteriaBuilder criteriaBuilder = myEntityManager.getCriteriaBuilder();
|
||||
CriteriaQuery<TermConceptParentChildLink> query = criteriaBuilder.createQuery(TermConceptParentChildLink.class);
|
||||
Root<TermConceptParentChildLink> root = query.from(TermConceptParentChildLink.class);
|
||||
Predicate predicate = criteriaBuilder.equal(root.get("myCodeSystemVersionPid").as(Long.class), csv.getPid());
|
||||
root.fetch("myChild");
|
||||
root.fetch("myParent");
|
||||
query.where(predicate);
|
||||
TypedQuery<TermConceptParentChildLink> typedQuery = myEntityManager.createQuery(query.select(root));
|
||||
org.hibernate.query.Query<TermConceptParentChildLink> hibernateQuery = (org.hibernate.query.Query<TermConceptParentChildLink>) typedQuery;
|
||||
ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
|
||||
try (ScrollableResultsIterator<TermConceptParentChildLink> scrollableResultsIterator = new ScrollableResultsIterator<>(scrollableResults)) {
|
||||
while (scrollableResultsIterator.hasNext()) {
|
||||
TermConceptParentChildLink next = scrollableResultsIterator.next();
|
||||
String parentCode = next.getParent().getCode();
|
||||
String childCode = next.getChild().getCode();
|
||||
parentCodeToChildCodes.put(parentCode, childCode);
|
||||
childCodeToParentCodes.put(childCode, parentCode);
|
||||
count++;
|
||||
}
|
||||
}
|
||||
ourLog.info("Loaded {} parent/child relationships in {}", count, sw.toString());
|
||||
}
|
||||
|
||||
// Account for root codes in the parent->child map
|
||||
for (String nextCode : codeToConceptPid.keySet()) {
|
||||
if (childCodeToParentCodes.get(nextCode).isEmpty()) {
|
||||
parentCodeToChildCodes.put("", nextCode);
|
||||
}
|
||||
}
|
||||
|
||||
UploadStatistics retVal = new UploadStatistics(codeSystemId);
|
||||
|
||||
// Add root concepts
|
||||
for (TermConcept nextRootConcept : theAdditions.getRootConcepts()) {
|
||||
List<String> parentCodes = Collections.emptyList();
|
||||
addConcept(csv, codeToConceptPid, parentCodes, nextRootConcept, parentCodeToChildCodes, retVal, true);
|
||||
}
|
||||
|
||||
// Add unanchored child concepts
|
||||
for (TermConcept nextUnanchoredChild : theAdditions.getUnanchoredChildConceptsToParentCodes().keySet()) {
|
||||
List<String> nextParentCodes = theAdditions.getUnanchoredChildConceptsToParentCodes().get(nextUnanchoredChild);
|
||||
addConcept(csv, codeToConceptPid, nextParentCodes, nextUnanchoredChild, parentCodeToChildCodes, retVal, true);
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
@Override
|
||||
public UploadStatistics applyDeltaCodeSystemsRemove(String theSystem, CustomTerminologySet theValue) {
|
||||
ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystem, "No system provided");
|
||||
validateDstu3OrNewer();
|
||||
|
||||
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
|
||||
if (cs == null) {
|
||||
throw new InvalidRequestException("Unknown code system: " + theSystem);
|
||||
}
|
||||
|
||||
AtomicInteger removeCounter = new AtomicInteger(0);
|
||||
|
||||
for (TermConcept nextSuppliedConcept : theValue.getRootConcepts()) {
|
||||
Optional<TermConcept> conceptOpt = myTerminologySvc.findCode(theSystem, nextSuppliedConcept.getCode());
|
||||
if (conceptOpt.isPresent()) {
|
||||
TermConcept concept = conceptOpt.get();
|
||||
deleteConceptChildrenAndConcept(concept, removeCounter);
|
||||
}
|
||||
}
|
||||
|
||||
IIdType target = cs.getResource().getIdDt();
|
||||
return new UploadStatistics(removeCounter.get(), target);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteCodeSystem(TermCodeSystem theCodeSystem) {
|
||||
ourLog.info(" * Deleting code system {}", theCodeSystem.getPid());
|
||||
|
||||
myEntityManager.flush();
|
||||
TermCodeSystem cs = myCodeSystemDao.findById(theCodeSystem.getPid()).orElseThrow(IllegalStateException::new);
|
||||
cs.setCurrentVersion(null);
|
||||
myCodeSystemDao.save(cs);
|
||||
myCodeSystemDao.flush();
|
||||
|
||||
List<TermCodeSystemVersion> codeSystemVersions = myCodeSystemVersionDao.findByCodeSystemPid(theCodeSystem.getPid());
|
||||
for (TermCodeSystemVersion next : codeSystemVersions) {
|
||||
deleteCodeSystemVersion(next.getPid());
|
||||
}
|
||||
myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem);
|
||||
myCodeSystemDao.delete(theCodeSystem);
|
||||
|
||||
myEntityManager.flush();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of saved concepts
|
||||
*/
|
||||
@Override
|
||||
public int saveConcept(TermConcept theConcept) {
|
||||
int retVal = 0;
|
||||
|
||||
/*
|
||||
* If the concept has an ID, we're reindexing, so there's no need to
|
||||
* save parent concepts first (it's way too slow to do that)
|
||||
*/
|
||||
if (theConcept.getId() == null) {
|
||||
retVal += ensureParentsSaved(theConcept.getParents());
|
||||
}
|
||||
|
||||
if (theConcept.getId() == null || theConcept.getIndexStatus() == null) {
|
||||
retVal++;
|
||||
theConcept.setIndexStatus(BaseHapiFhirDao.INDEX_STATUS_INDEXED);
|
||||
theConcept.setUpdated(new Date());
|
||||
myConceptDao.save(theConcept);
|
||||
|
||||
for (TermConceptProperty next : theConcept.getProperties()) {
|
||||
myConceptPropertyDao.save(next);
|
||||
}
|
||||
|
||||
for (TermConceptDesignation next : theConcept.getDesignations()) {
|
||||
myConceptDesignationDao.save(next);
|
||||
}
|
||||
}
|
||||
|
||||
ourLog.trace("Saved {} and got PID {}", theConcept.getCode(), theConcept.getId());
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.MANDATORY)
|
||||
public void storeNewCodeSystemVersionIfNeeded(CodeSystem theCodeSystem, ResourceTable theResourceEntity) {
|
||||
if (theCodeSystem != null && isNotBlank(theCodeSystem.getUrl())) {
|
||||
String codeSystemUrl = theCodeSystem.getUrl();
|
||||
if (theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.COMPLETE || theCodeSystem.getContent() == null || theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT) {
|
||||
ourLog.info("CodeSystem {} has a status of {}, going to store concepts in terminology tables", theResourceEntity.getIdDt().getValue(), theCodeSystem.getContentElement().getValueAsString());
|
||||
|
||||
Long codeSystemResourcePid = getCodeSystemResourcePid(theCodeSystem.getIdElement());
|
||||
|
||||
/*
|
||||
* If this is a not-present codesystem, we don't want to store a new version if one
|
||||
* already exists, since that will wipe out the existing concepts. We do create or update
|
||||
* the TermCodeSystem table though, since that allows the DB to reject changes
|
||||
* that would result in duplicate CodeSysten.url values.
|
||||
*/
|
||||
if (theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT) {
|
||||
TermCodeSystem codeSystem = myCodeSystemDao.findByCodeSystemUri(theCodeSystem.getUrl());
|
||||
if (codeSystem != null) {
|
||||
getOrCreateTermCodeSystem(codeSystemResourcePid, theCodeSystem.getUrl(), theCodeSystem.getUrl(), theResourceEntity);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
TermCodeSystemVersion persCs = new TermCodeSystemVersion();
|
||||
|
||||
populateCodeSystemVersionProperties(persCs, theCodeSystem, theResourceEntity);
|
||||
|
||||
persCs.getConcepts().addAll(BaseTermReadSvcImpl.toPersistedConcepts(theCodeSystem.getConcept(), persCs));
|
||||
ourLog.info("Code system has {} concepts", persCs.getConcepts().size());
|
||||
storeNewCodeSystemVersion(codeSystemResourcePid, codeSystemUrl, theCodeSystem.getName(), theCodeSystem.getVersion(), persCs, theResourceEntity);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
public IIdType storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequest, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps) {
|
||||
Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL");
|
||||
|
||||
IIdType csId = myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(theCodeSystemResource);
|
||||
Long codeSystemResourcePid = myIdHelperService.translateForcedIdToPid(csId, theRequest);
|
||||
ResourceTable resource = myResourceTableDao.getOne(codeSystemResourcePid);
|
||||
|
||||
ourLog.info("CodeSystem resource has ID: {}", csId.getValue());
|
||||
|
||||
populateCodeSystemVersionProperties(theCodeSystemVersion, theCodeSystemResource, resource);
|
||||
|
||||
storeNewCodeSystemVersion(codeSystemResourcePid, theCodeSystemResource.getUrl(), theCodeSystemResource.getName(), theCodeSystemResource.getVersion(), theCodeSystemVersion, resource);
|
||||
|
||||
myDeferredStorageSvc.addConceptMapsToStorageQueue(theConceptMaps);
|
||||
myDeferredStorageSvc.addValueSetsToStorageQueue(theValueSets);
|
||||
|
||||
return csId;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
public void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable) {
|
||||
ourLog.info("Storing code system");
|
||||
|
||||
ValidateUtil.isTrueOrThrowInvalidRequest(theCodeSystemVersion.getResource() != null, "No resource supplied");
|
||||
ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystemUri, "No system URI supplied");
|
||||
|
||||
// Grab the existing versions so we can delete them later
|
||||
List<TermCodeSystemVersion> existing = myCodeSystemVersionDao.findByCodeSystemResourcePid(theCodeSystemResourcePid);
|
||||
|
||||
/*
|
||||
* For now we always delete old versions. At some point it would be nice to allow configuration to keep old versions.
|
||||
*/
|
||||
|
||||
ourLog.info("Deleting old code system versions");
|
||||
for (TermCodeSystemVersion next : existing) {
|
||||
Long codeSystemVersionPid = next.getPid();
|
||||
deleteCodeSystemVersion(codeSystemVersionPid);
|
||||
}
|
||||
|
||||
ourLog.info("Flushing...");
|
||||
myConceptDao.flush();
|
||||
ourLog.info("Done flushing");
|
||||
|
||||
/*
|
||||
* Do the upload
|
||||
*/
|
||||
|
||||
TermCodeSystem codeSystem = getOrCreateTermCodeSystem(theCodeSystemResourcePid, theSystemUri, theSystemName, theCodeSystemResourceTable);
|
||||
|
||||
theCodeSystemVersion.setCodeSystem(codeSystem);
|
||||
|
||||
theCodeSystemVersion.setCodeSystemDisplayName(theSystemName);
|
||||
theCodeSystemVersion.setCodeSystemVersionId(theSystemVersionId);
|
||||
|
||||
ourLog.info("Validating all codes in CodeSystem for storage (this can take some time for large sets)");
|
||||
|
||||
// Validate the code system
|
||||
ArrayList<String> conceptsStack = new ArrayList<>();
|
||||
IdentityHashMap<TermConcept, Object> allConcepts = new IdentityHashMap<>();
|
||||
int totalCodeCount = 0;
|
||||
for (TermConcept next : theCodeSystemVersion.getConcepts()) {
|
||||
totalCodeCount += validateConceptForStorage(next, theCodeSystemVersion, conceptsStack, allConcepts);
|
||||
}
|
||||
|
||||
ourLog.info("Saving version containing {} concepts", totalCodeCount);
|
||||
|
||||
TermCodeSystemVersion codeSystemVersion = myCodeSystemVersionDao.saveAndFlush(theCodeSystemVersion);
|
||||
|
||||
ourLog.info("Saving code system");
|
||||
|
||||
codeSystem.setCurrentVersion(theCodeSystemVersion);
|
||||
codeSystem = myCodeSystemDao.saveAndFlush(codeSystem);
|
||||
|
||||
ourLog.info("Setting CodeSystemVersion[{}] on {} concepts...", codeSystem.getPid(), totalCodeCount);
|
||||
|
||||
for (TermConcept next : theCodeSystemVersion.getConcepts()) {
|
||||
populateVersion(next, codeSystemVersion);
|
||||
}
|
||||
|
||||
ourLog.info("Saving {} concepts...", totalCodeCount);
|
||||
|
||||
IdentityHashMap<TermConcept, Object> conceptsStack2 = new IdentityHashMap<>();
|
||||
for (TermConcept next : theCodeSystemVersion.getConcepts()) {
|
||||
persistChildren(next, codeSystemVersion, conceptsStack2, totalCodeCount);
|
||||
}
|
||||
|
||||
ourLog.info("Done saving concepts, flushing to database");
|
||||
|
||||
myConceptDao.flush();
|
||||
myConceptParentChildLinkDao.flush();
|
||||
|
||||
ourLog.info("Done deleting old code system versions");
|
||||
|
||||
if (myDeferredStorageSvc.isStorageQueueEmpty() == false) {
|
||||
ourLog.info("Note that some concept saving has been deferred");
|
||||
}
|
||||
}
|
||||
|
||||
private void deleteCodeSystemVersion(final Long theCodeSystemVersionPid) {
|
||||
ourLog.info(" * Deleting code system version {}", theCodeSystemVersionPid);
|
||||
|
||||
PageRequest page1000 = PageRequest.of(0, 1000);
|
||||
|
||||
// Parent/Child links
|
||||
{
|
||||
String descriptor = "parent/child links";
|
||||
Supplier<Slice<TermConceptParentChildLink>> loader = () -> myConceptParentChildLinkDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
|
||||
Supplier<Integer> counter = () -> myConceptParentChildLinkDao.countByCodeSystemVersion(theCodeSystemVersionPid);
|
||||
doDelete(descriptor, loader, counter, myConceptParentChildLinkDao);
|
||||
}
|
||||
|
||||
// Properties
|
||||
{
|
||||
String descriptor = "concept properties";
|
||||
Supplier<Slice<TermConceptProperty>> loader = () -> myConceptPropertyDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
|
||||
Supplier<Integer> counter = () -> myConceptPropertyDao.countByCodeSystemVersion(theCodeSystemVersionPid);
|
||||
doDelete(descriptor, loader, counter, myConceptPropertyDao);
|
||||
}
|
||||
|
||||
// Designations
|
||||
{
|
||||
String descriptor = "concept designations";
|
||||
Supplier<Slice<TermConceptDesignation>> loader = () -> myConceptDesignationDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
|
||||
Supplier<Integer> counter = () -> myConceptDesignationDao.countByCodeSystemVersion(theCodeSystemVersionPid);
|
||||
doDelete(descriptor, loader, counter, myConceptDesignationDao);
|
||||
}
|
||||
|
||||
// Concepts
|
||||
{
|
||||
String descriptor = "concepts";
|
||||
// For some reason, concepts are much slower to delete, so use a smaller batch size
|
||||
PageRequest page100 = PageRequest.of(0, 100);
|
||||
Supplier<Slice<TermConcept>> loader = () -> myConceptDao.findByCodeSystemVersion(page100, theCodeSystemVersionPid);
|
||||
Supplier<Integer> counter = () -> myConceptDao.countByCodeSystemVersion(theCodeSystemVersionPid);
|
||||
doDelete(descriptor, loader, counter, myConceptDao);
|
||||
}
|
||||
|
||||
Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
|
||||
if (codeSystemOpt.isPresent()) {
|
||||
TermCodeSystem codeSystem = codeSystemOpt.get();
|
||||
ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid());
|
||||
codeSystem.setCurrentVersion(null);
|
||||
myCodeSystemDao.save(codeSystem);
|
||||
}
|
||||
|
||||
ourLog.info(" * Deleting code system version");
|
||||
myCodeSystemVersionDao.deleteById(theCodeSystemVersionPid);
|
||||
|
||||
}
|
||||
|
||||
private void validateDstu3OrNewer() {
|
||||
Validate.isTrue(myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), "Terminology operations only supported in DSTU3+ mode");
|
||||
}
|
||||
|
||||
private void addConcept(TermCodeSystemVersion theCsv, Map<String, Long> theCodeToConceptPid, Collection<String> theParentCodes, TermConcept theConceptToAdd, ListMultimap<String, String> theParentCodeToChildCodes, UploadStatistics theStatisticsTracker, boolean theForceResequence) {
|
||||
TermConcept nextConceptToAdd = theConceptToAdd;
|
||||
|
||||
String nextCodeToAdd = nextConceptToAdd.getCode();
|
||||
String parentDescription = "(root concept)";
|
||||
Set<TermConcept> parentConcepts = new HashSet<>();
|
||||
if (!theParentCodes.isEmpty()) {
|
||||
parentDescription = "[" + String.join(", ", theParentCodes) + "]";
|
||||
for (String nextParentCode : theParentCodes) {
|
||||
Long nextParentCodePid = theCodeToConceptPid.get(nextParentCode);
|
||||
if (nextParentCodePid == null) {
|
||||
throw new InvalidRequestException("Unable to add code \"" + nextCodeToAdd + "\" to unknown parent: " + nextParentCode);
|
||||
}
|
||||
parentConcepts.add(myConceptDao.getOne(nextParentCodePid));
|
||||
}
|
||||
}
|
||||
|
||||
ourLog.info("Saving concept {} with parent {}", theStatisticsTracker.getUpdatedConceptCount(), parentDescription);
|
||||
|
||||
if (theCodeToConceptPid.containsKey(nextCodeToAdd)) {
|
||||
|
||||
TermConcept existingCode = myConceptDao.getOne(theCodeToConceptPid.get(nextCodeToAdd));
|
||||
existingCode.setIndexStatus(null);
|
||||
existingCode.setDisplay(nextConceptToAdd.getDisplay());
|
||||
nextConceptToAdd = existingCode;
|
||||
|
||||
}
|
||||
|
||||
if (theConceptToAdd.getSequence() == null || theForceResequence) {
|
||||
// If this is a new code, give it a sequence number based on how many concepts the
|
||||
// parent already has (or the highest number, if the code has multiple parents)
|
||||
int sequence = 0;
|
||||
for (String nextParentCode : theParentCodes) {
|
||||
theParentCodeToChildCodes.put(nextParentCode, nextCodeToAdd);
|
||||
sequence = Math.max(sequence, theParentCodeToChildCodes.get(nextParentCode).size());
|
||||
}
|
||||
if (theParentCodes.isEmpty()) {
|
||||
theParentCodeToChildCodes.put("", nextCodeToAdd);
|
||||
sequence = Math.max(sequence, theParentCodeToChildCodes.get("").size());
|
||||
}
|
||||
nextConceptToAdd.setSequence(sequence);
|
||||
}
|
||||
|
||||
|
||||
// Drop any old parent-child links if they aren't explicitly specified in the
|
||||
// hierarchy being added
|
||||
for (Iterator<TermConceptParentChildLink> iter = nextConceptToAdd.getParents().iterator(); iter.hasNext(); ) {
|
||||
TermConceptParentChildLink nextLink = iter.next();
|
||||
String parentCode = nextLink.getParent().getCode();
|
||||
boolean shouldRemove = !theParentCodes.contains(parentCode);
|
||||
if (shouldRemove) {
|
||||
ourLog.info("Dropping existing parent/child link from {} -> {}", parentCode, nextCodeToAdd);
|
||||
myConceptParentChildLinkDao.delete(nextLink);
|
||||
iter.remove();
|
||||
|
||||
List<TermConceptParentChildLink> parentChildrenList = nextLink.getParent().getChildren();
|
||||
parentChildrenList.remove(nextLink);
|
||||
}
|
||||
}
|
||||
|
||||
nextConceptToAdd.setParentPids(null);
|
||||
nextConceptToAdd.setCodeSystemVersion(theCsv);
|
||||
nextConceptToAdd = myConceptDao.save(nextConceptToAdd);
|
||||
|
||||
Long nextConceptPid = nextConceptToAdd.getId();
|
||||
Validate.notNull(nextConceptPid);
|
||||
theCodeToConceptPid.put(nextCodeToAdd, nextConceptPid);
|
||||
theStatisticsTracker.incrementUpdatedConceptCount();
|
||||
|
||||
// Add link to new child to the parent if this link doesn't already exist (this will be the
|
||||
// case for concepts being added to an existing child concept, but won't be the case when
|
||||
// we're recursively adding children)
|
||||
for (TermConcept nextParentConcept : parentConcepts) {
|
||||
if (nextParentConcept.getChildren().stream().noneMatch(t->t.getChild().getCode().equals(nextCodeToAdd))) {
|
||||
TermConceptParentChildLink parentLink = new TermConceptParentChildLink();
|
||||
parentLink.setParent(nextParentConcept);
|
||||
parentLink.setChild(nextConceptToAdd);
|
||||
parentLink.setCodeSystem(theCsv);
|
||||
parentLink.setRelationshipType(TermConceptParentChildLink.RelationshipTypeEnum.ISA);
|
||||
nextParentConcept.getChildren().add(parentLink);
|
||||
nextConceptToAdd.getParents().add(parentLink);
|
||||
myConceptParentChildLinkDao.save(parentLink);
|
||||
}
|
||||
}
|
||||
|
||||
// Save children recursively
|
||||
for (TermConceptParentChildLink nextChildConceptLink : nextConceptToAdd.getChildren()) {
|
||||
|
||||
TermConcept nextChild = nextChildConceptLink.getChild();
|
||||
Collection<String> parentCodes = nextChild.getParents().stream().map(t -> t.getParent().getCode()).collect(Collectors.toList());
|
||||
addConcept(theCsv, theCodeToConceptPid, parentCodes, nextChild, theParentCodeToChildCodes, theStatisticsTracker, false);
|
||||
|
||||
if (nextChildConceptLink.getId() == null) {
|
||||
nextChildConceptLink.setCodeSystem(theCsv);
|
||||
myConceptParentChildLinkDao.save(nextChildConceptLink);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
 * Resolves the persistent ID (PID) of the CodeSystem resource identified by the given ID.
 * Convenience overload that delegates with no request details.
 *
 * @param theIdType the CodeSystem resource ID to resolve
 * @return the persistent ID for the resource
 */
private Long getCodeSystemResourcePid(IIdType theIdType) {
   return getCodeSystemResourcePid(theIdType, null);
}
|
||||
|
||||
/**
 * Translates a (possibly forced/client-assigned) resource ID into its persistent ID
 * via the ID helper service.
 *
 * @param theIdType the CodeSystem resource ID to translate
 * @param theRequestDetails request context used during resolution; may be {@code null}
 * @return the persistent ID for the resource
 */
private Long getCodeSystemResourcePid(IIdType theIdType, RequestDetails theRequestDetails) {
   return myIdHelperService.translateForcedIdToPid(theIdType, theRequestDetails);
}
|
||||
|
||||
private void persistChildren(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack, int theTotalConcepts) {
|
||||
if (theConceptsStack.put(theConcept, PLACEHOLDER_OBJECT) != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (theConceptsStack.size() == 1 || theConceptsStack.size() % 10000 == 0) {
|
||||
float pct = (float) theConceptsStack.size() / (float) theTotalConcepts;
|
||||
ourLog.info("Have processed {}/{} concepts ({}%)", theConceptsStack.size(), theTotalConcepts, (int) (pct * 100.0f));
|
||||
}
|
||||
|
||||
theConcept.setCodeSystemVersion(theCodeSystem);
|
||||
theConcept.setIndexStatus(BaseHapiFhirDao.INDEX_STATUS_INDEXED);
|
||||
|
||||
if (theConceptsStack.size() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) {
|
||||
saveConcept(theConcept);
|
||||
} else {
|
||||
myDeferredStorageSvc.addConceptToStorageQueue(theConcept);
|
||||
}
|
||||
|
||||
for (TermConceptParentChildLink next : theConcept.getChildren()) {
|
||||
persistChildren(next.getChild(), theCodeSystem, theConceptsStack, theTotalConcepts);
|
||||
}
|
||||
|
||||
for (TermConceptParentChildLink next : theConcept.getChildren()) {
|
||||
if (theConceptsStack.size() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) {
|
||||
saveConceptLink(next);
|
||||
} else {
|
||||
myDeferredStorageSvc.addConceptLinkToStorageQueue(next);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void populateVersion(TermConcept theNext, TermCodeSystemVersion theCodeSystemVersion) {
|
||||
if (theNext.getCodeSystemVersion() != null) {
|
||||
return;
|
||||
}
|
||||
theNext.setCodeSystemVersion(theCodeSystemVersion);
|
||||
for (TermConceptParentChildLink next : theNext.getChildren()) {
|
||||
populateVersion(next.getChild(), theCodeSystemVersion);
|
||||
}
|
||||
}
|
||||
|
||||
private void saveConceptLink(TermConceptParentChildLink next) {
|
||||
if (next.getId() == null) {
|
||||
myConceptParentChildLinkDao.save(next);
|
||||
}
|
||||
}
|
||||
|
||||
private int ensureParentsSaved(Collection<TermConceptParentChildLink> theParents) {
|
||||
ourLog.trace("Checking {} parents", theParents.size());
|
||||
int retVal = 0;
|
||||
|
||||
for (TermConceptParentChildLink nextLink : theParents) {
|
||||
if (nextLink.getRelationshipType() == TermConceptParentChildLink.RelationshipTypeEnum.ISA) {
|
||||
TermConcept nextParent = nextLink.getParent();
|
||||
retVal += ensureParentsSaved(nextParent.getParents());
|
||||
if (nextParent.getId() == null) {
|
||||
nextParent.setUpdated(new Date());
|
||||
myConceptDao.saveAndFlush(nextParent);
|
||||
retVal++;
|
||||
ourLog.debug("Saved parent code {} and got id {}", nextParent.getCode(), nextParent.getId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private TermCodeSystem getOrCreateTermCodeSystem(Long theCodeSystemResourcePid, String theSystemUri, String theSystemName, ResourceTable theCodeSystemResourceTable) {
|
||||
TermCodeSystem codeSystem = myCodeSystemDao.findByCodeSystemUri(theSystemUri);
|
||||
if (codeSystem == null) {
|
||||
codeSystem = myCodeSystemDao.findByResourcePid(theCodeSystemResourcePid);
|
||||
if (codeSystem == null) {
|
||||
codeSystem = new TermCodeSystem();
|
||||
}
|
||||
codeSystem.setResource(theCodeSystemResourceTable);
|
||||
} else {
|
||||
if (!ObjectUtil.equals(codeSystem.getResource().getId(), theCodeSystemResourceTable.getId())) {
|
||||
String msg = myContext.getLocalizer().getMessage(BaseTermReadSvcImpl.class, "cannotCreateDuplicateCodeSystemUrl", theSystemUri,
|
||||
codeSystem.getResource().getIdDt().toUnqualifiedVersionless().getValue());
|
||||
throw new UnprocessableEntityException(msg);
|
||||
}
|
||||
}
|
||||
|
||||
codeSystem.setCodeSystemUri(theSystemUri);
|
||||
codeSystem.setName(theSystemName);
|
||||
codeSystem = myCodeSystemDao.save(codeSystem);
|
||||
return codeSystem;
|
||||
}
|
||||
|
||||
private void populateCodeSystemVersionProperties(TermCodeSystemVersion theCodeSystemVersion, CodeSystem theCodeSystemResource, ResourceTable theResourceTable) {
|
||||
theCodeSystemVersion.setResource(theResourceTable);
|
||||
theCodeSystemVersion.setCodeSystemDisplayName(theCodeSystemResource.getName());
|
||||
theCodeSystemVersion.setCodeSystemVersionId(theCodeSystemResource.getVersion());
|
||||
}
|
||||
|
||||
private void deleteConceptChildrenAndConcept(TermConcept theConcept, AtomicInteger theRemoveCounter) {
|
||||
for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) {
|
||||
deleteConceptChildrenAndConcept(nextChildLink.getChild(), theRemoveCounter);
|
||||
myConceptParentChildLinkDao.delete(nextChildLink);
|
||||
}
|
||||
|
||||
myConceptDesignationDao.deleteAll(theConcept.getDesignations());
|
||||
myConceptPropertyDao.deleteAll(theConcept.getProperties());
|
||||
myConceptDao.delete(theConcept);
|
||||
theRemoveCounter.incrementAndGet();
|
||||
}
|
||||
|
||||
|
||||
private <T> void doDelete(String theDescriptor, Supplier<Slice<T>> theLoader, Supplier<Integer> theCounter, JpaRepository<T, ?> theDao) {
|
||||
int count;
|
||||
ourLog.info(" * Deleting {}", theDescriptor);
|
||||
int totalCount = theCounter.get();
|
||||
StopWatch sw = new StopWatch();
|
||||
count = 0;
|
||||
while (true) {
|
||||
Slice<T> link = theLoader.get();
|
||||
if (!link.hasContent()) {
|
||||
break;
|
||||
}
|
||||
|
||||
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
|
||||
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
|
||||
txTemplate.execute(t -> {
|
||||
theDao.deleteAll(link);
|
||||
return null;
|
||||
});
|
||||
|
||||
count += link.getNumberOfElements();
|
||||
ourLog.info(" * {} {} deleted - {}/sec - ETA: {}", count, theDescriptor, sw.formatThroughput(count, TimeUnit.SECONDS), sw.getEstimatedTimeRemaining(count, totalCount));
|
||||
}
|
||||
theDao.flush();
|
||||
}
|
||||
|
||||
|
||||
private int validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, ArrayList<String> theConceptsStack,
|
||||
IdentityHashMap<TermConcept, Object> theAllConcepts) {
|
||||
ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystemVersion() != null, "CodeSystemVersion is null");
|
||||
ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystemVersion() == theCodeSystem, "CodeSystems are not equal");
|
||||
ValidateUtil.isNotBlankOrThrowInvalidRequest(theConcept.getCode(), "CodeSystem contains a code with no code value");
|
||||
|
||||
if (theConceptsStack.contains(theConcept.getCode())) {
|
||||
throw new InvalidRequestException("CodeSystem contains circular reference around code " + theConcept.getCode());
|
||||
}
|
||||
theConceptsStack.add(theConcept.getCode());
|
||||
|
||||
int retVal = 0;
|
||||
if (theAllConcepts.put(theConcept, theAllConcepts) == null) {
|
||||
if (theAllConcepts.size() % 1000 == 0) {
|
||||
ourLog.info("Have validated {} concepts", theAllConcepts.size());
|
||||
}
|
||||
retVal = 1;
|
||||
}
|
||||
|
||||
for (TermConceptParentChildLink next : theConcept.getChildren()) {
|
||||
next.setCodeSystem(theCodeSystem);
|
||||
retVal += validateConceptForStorage(next.getChild(), theCodeSystem, theConceptsStack, theAllConcepts);
|
||||
}
|
||||
|
||||
theConceptsStack.remove(theConceptsStack.size() - 1);
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,260 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
|
||||
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
|
||||
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.quartz.JobExecutionContext;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.annotation.Propagation;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
|
||||
|
||||
private static final int SCHEDULE_INTERVAL_MILLIS = 5000;
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(TermDeferredStorageSvcImpl.class);
|
||||
@Autowired
|
||||
protected ITermConceptDao myConceptDao;
|
||||
@Autowired
|
||||
protected PlatformTransactionManager myTransactionMgr;
|
||||
private boolean myProcessDeferred = true;
|
||||
private List<TermConcept> myDeferredConcepts = Collections.synchronizedList(new ArrayList<>());
|
||||
private List<ValueSet> myDeferredValueSets = Collections.synchronizedList(new ArrayList<>());
|
||||
private List<ConceptMap> myDeferredConceptMaps = Collections.synchronizedList(new ArrayList<>());
|
||||
private List<TermConceptParentChildLink> myConceptLinksToSaveLater = Collections.synchronizedList(new ArrayList<>());
|
||||
@Autowired
|
||||
private DaoConfig myDaoConfig;
|
||||
@Autowired
|
||||
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;
|
||||
@Autowired
|
||||
private ISchedulerService mySchedulerService;
|
||||
@Autowired
|
||||
private ITermVersionAdapterSvc myTerminologyVersionAdapterSvc;
|
||||
@Autowired
|
||||
private ITermCodeSystemStorageSvc myConceptStorageSvc;
|
||||
|
||||
@Override
|
||||
public void addConceptToStorageQueue(TermConcept theConcept) {
|
||||
Validate.notNull(theConcept);
|
||||
myDeferredConcepts.add(theConcept);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addConceptLinkToStorageQueue(TermConceptParentChildLink theConceptLink) {
|
||||
Validate.notNull(theConceptLink);
|
||||
myConceptLinksToSaveLater.add(theConceptLink);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addConceptMapsToStorageQueue(List<ConceptMap> theConceptMaps) {
|
||||
Validate.notNull(theConceptMaps);
|
||||
myDeferredConceptMaps.addAll(theConceptMaps);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addValueSetsToStorageQueue(List<ValueSet> theValueSets) {
|
||||
Validate.notNull(theValueSets);
|
||||
myDeferredValueSets.addAll(theValueSets);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setProcessDeferred(boolean theProcessDeferred) {
|
||||
myProcessDeferred = theProcessDeferred;
|
||||
}
|
||||
|
||||
private void processDeferredConceptMaps() {
|
||||
int count = Math.min(myDeferredConceptMaps.size(), 20);
|
||||
for (ConceptMap nextConceptMap : new ArrayList<>(myDeferredConceptMaps.subList(0, count))) {
|
||||
ourLog.info("Creating ConceptMap: {}", nextConceptMap.getId());
|
||||
myTerminologyVersionAdapterSvc.createOrUpdateConceptMap(nextConceptMap);
|
||||
myDeferredConceptMaps.remove(nextConceptMap);
|
||||
}
|
||||
ourLog.info("Saved {} deferred ConceptMap resources, have {} remaining", count, myDeferredConceptMaps.size());
|
||||
}
|
||||
|
||||
private void processDeferredConcepts() {
|
||||
int codeCount = 0, relCount = 0;
|
||||
StopWatch stopwatch = new StopWatch();
|
||||
|
||||
int count = Math.min(myDaoConfig.getDeferIndexingForCodesystemsOfSize(), myDeferredConcepts.size());
|
||||
ourLog.info("Saving {} deferred concepts...", count);
|
||||
while (codeCount < count && myDeferredConcepts.size() > 0) {
|
||||
TermConcept next = myDeferredConcepts.remove(0);
|
||||
codeCount += myConceptStorageSvc.saveConcept(next);
|
||||
}
|
||||
|
||||
if (codeCount > 0) {
|
||||
ourLog.info("Saved {} deferred concepts ({} codes remain and {} relationships remain) in {}ms ({}ms / code)",
|
||||
codeCount, myDeferredConcepts.size(), myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(codeCount));
|
||||
}
|
||||
|
||||
if (codeCount == 0) {
|
||||
count = Math.min(myDaoConfig.getDeferIndexingForCodesystemsOfSize(), myConceptLinksToSaveLater.size());
|
||||
ourLog.info("Saving {} deferred concept relationships...", count);
|
||||
while (relCount < count && myConceptLinksToSaveLater.size() > 0) {
|
||||
TermConceptParentChildLink next = myConceptLinksToSaveLater.remove(0);
|
||||
|
||||
if (!myConceptDao.findById(next.getChild().getId()).isPresent() || !myConceptDao.findById(next.getParent().getId()).isPresent()) {
|
||||
ourLog.warn("Not inserting link from child {} to parent {} because it appears to have been deleted", next.getParent().getCode(), next.getChild().getCode());
|
||||
continue;
|
||||
}
|
||||
|
||||
saveConceptLink(next);
|
||||
relCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (relCount > 0) {
|
||||
ourLog.info("Saved {} deferred relationships ({} remain) in {}ms ({}ms / entry)",
|
||||
relCount, myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(relCount));
|
||||
}
|
||||
|
||||
if ((myDeferredConcepts.size() + myConceptLinksToSaveLater.size()) == 0) {
|
||||
ourLog.info("All deferred concepts and relationships have now been synchronized to the database");
|
||||
}
|
||||
}
|
||||
|
||||
private void processDeferredValueSets() {
|
||||
int count = Math.min(myDeferredValueSets.size(), 20);
|
||||
for (ValueSet nextValueSet : new ArrayList<>(myDeferredValueSets.subList(0, count))) {
|
||||
ourLog.info("Creating ValueSet: {}", nextValueSet.getId());
|
||||
myTerminologyVersionAdapterSvc.createOrUpdateValueSet(nextValueSet);
|
||||
myDeferredValueSets.remove(nextValueSet);
|
||||
}
|
||||
ourLog.info("Saved {} deferred ValueSet resources, have {} remaining", count, myDeferredValueSets.size());
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is present only for unit tests, do not call from client code
|
||||
*/
|
||||
@VisibleForTesting
|
||||
public synchronized void clearDeferred() {
|
||||
myDeferredValueSets.clear();
|
||||
myDeferredConceptMaps.clear();
|
||||
myDeferredConcepts.clear();
|
||||
}
|
||||
|
||||
@Transactional(propagation = Propagation.NEVER)
|
||||
@Override
|
||||
public synchronized void saveDeferred() {
|
||||
if (isProcessDeferredPaused()) {
|
||||
return;
|
||||
}
|
||||
|
||||
TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
|
||||
tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
|
||||
if (isDeferredConceptsOrConceptLinksToSaveLater()) {
|
||||
tt.execute(t -> {
|
||||
processDeferredConcepts();
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
if (isDeferredValueSets()) {
|
||||
tt.execute(t -> {
|
||||
processDeferredValueSets();
|
||||
return null;
|
||||
});
|
||||
}
|
||||
if (isDeferredConceptMaps()) {
|
||||
tt.execute(t -> {
|
||||
processDeferredConceptMaps();
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isStorageQueueEmpty() {
|
||||
boolean retVal = true;
|
||||
retVal &= !isProcessDeferredPaused();
|
||||
retVal &= !isDeferredConcepts();
|
||||
retVal &= !isConceptLinksToSaveLater();
|
||||
retVal &= !isDeferredValueSets();
|
||||
retVal &= !isDeferredConceptMaps();
|
||||
return retVal;
|
||||
}
|
||||
|
||||
|
||||
private void saveConceptLink(TermConceptParentChildLink next) {
|
||||
if (next.getId() == null) {
|
||||
myConceptParentChildLinkDao.save(next);
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isProcessDeferredPaused() {
|
||||
return !myProcessDeferred;
|
||||
}
|
||||
|
||||
private boolean isDeferredConceptsOrConceptLinksToSaveLater() {
|
||||
return isDeferredConcepts() || isConceptLinksToSaveLater();
|
||||
}
|
||||
|
||||
private boolean isDeferredConcepts() {
|
||||
return !myDeferredConcepts.isEmpty();
|
||||
}
|
||||
|
||||
private boolean isConceptLinksToSaveLater() {
|
||||
return !myConceptLinksToSaveLater.isEmpty();
|
||||
}
|
||||
|
||||
private boolean isDeferredValueSets() {
|
||||
return !myDeferredValueSets.isEmpty();
|
||||
}
|
||||
|
||||
private boolean isDeferredConceptMaps() {
|
||||
return !myDeferredConceptMaps.isEmpty();
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
public void registerScheduledJob() {
|
||||
// Register scheduled job to save deferred concepts
|
||||
// In the future it would be great to make this a cluster-aware task somehow
|
||||
ScheduledJobDefinition jobDefinition = new ScheduledJobDefinition();
|
||||
jobDefinition.setId(BaseTermReadSvcImpl.class.getName() + "_saveDeferred");
|
||||
jobDefinition.setJobClass(SaveDeferredJob.class);
|
||||
mySchedulerService.scheduleFixedDelay(SCHEDULE_INTERVAL_MILLIS, false, jobDefinition);
|
||||
}
|
||||
|
||||
public static class SaveDeferredJob extends FireAtIntervalJob {
|
||||
|
||||
@Autowired
|
||||
private ITermDeferredStorageSvc myTerminologySvc;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
public SaveDeferredJob() {
|
||||
super(SCHEDULE_INTERVAL_MILLIS);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doExecute(JobExecutionContext theContext) {
|
||||
myTerminologySvc.saveDeferred();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -4,8 +4,10 @@ import ca.uhn.fhir.context.FhirContext;
|
|||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import ca.uhn.fhir.jpa.term.custom.ConceptHandler;
|
||||
import ca.uhn.fhir.jpa.term.custom.HierarchyHandler;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
|
||||
import ca.uhn.fhir.jpa.term.loinc.*;
|
||||
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerConcept;
|
||||
import ca.uhn.fhir.jpa.term.snomedct.SctHandlerDescription;
|
||||
|
@ -15,7 +17,6 @@ import ca.uhn.fhir.rest.api.EncodingEnum;
|
|||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Charsets;
|
||||
|
@ -23,9 +24,7 @@ import org.apache.commons.csv.CSVFormat;
|
|||
import org.apache.commons.csv.CSVParser;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.apache.commons.csv.QuoteMode;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.io.input.BOMInputStream;
|
||||
import org.apache.commons.lang3.ObjectUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
|
@ -41,8 +40,7 @@ import javax.validation.constraints.NotNull;
|
|||
import java.io.*;
|
||||
import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipInputStream;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
@ -67,64 +65,29 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
||||
public static final String SCT_FILE_CONCEPT = "Terminology/sct2_Concept_Full_";
|
||||
public static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full-en";
|
||||
public static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full";
|
||||
|
||||
public static final String IMGTHLA_HLA_NOM_TXT = "hla_nom.txt";
|
||||
public static final String IMGTHLA_HLA_XML = "hla.xml";
|
||||
|
||||
public class TermLoaderSvcImpl implements ITermLoaderSvc {
|
||||
public static final String CUSTOM_CONCEPTS_FILE = "concepts.csv";
|
||||
public static final String CUSTOM_HIERARCHY_FILE = "hierarchy.csv";
|
||||
public static final String CUSTOM_CODESYSTEM_JSON = "codesystem.json";
|
||||
public static final String CUSTOM_CODESYSTEM_XML = "codesystem.xml";
|
||||
static final String IMGTHLA_HLA_NOM_TXT = "hla_nom.txt";
|
||||
static final String IMGTHLA_HLA_XML = "hla.xml";
|
||||
static final String CUSTOM_CODESYSTEM_JSON = "codesystem.json";
|
||||
private static final String SCT_FILE_CONCEPT = "Terminology/sct2_Concept_Full_";
|
||||
private static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full-en";
|
||||
private static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full";
|
||||
private static final String CUSTOM_CODESYSTEM_XML = "codesystem.xml";
|
||||
|
||||
private static final int LOG_INCREMENT = 1000;
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImpl.class);
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermLoaderSvcImpl.class);
|
||||
// FYI: Hardcoded to R4 because that's what the term svc uses internally
|
||||
private final FhirContext myCtx = FhirContext.forR4();
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myTermSvc;
|
||||
|
||||
private void dropCircularRefs(TermConcept theConcept, ArrayList<String> theChain, Map<String, TermConcept> theCode2concept, Counter theCircularCounter) {
|
||||
|
||||
theChain.add(theConcept.getCode());
|
||||
for (Iterator<TermConceptParentChildLink> childIter = theConcept.getChildren().iterator(); childIter.hasNext(); ) {
|
||||
TermConceptParentChildLink next = childIter.next();
|
||||
TermConcept nextChild = next.getChild();
|
||||
if (theChain.contains(nextChild.getCode())) {
|
||||
|
||||
StringBuilder b = new StringBuilder();
|
||||
b.append("Removing circular reference code ");
|
||||
b.append(nextChild.getCode());
|
||||
b.append(" from parent ");
|
||||
b.append(next.getParent().getCode());
|
||||
b.append(". Chain was: ");
|
||||
for (String nextInChain : theChain) {
|
||||
TermConcept nextCode = theCode2concept.get(nextInChain);
|
||||
b.append(nextCode.getCode());
|
||||
b.append('[');
|
||||
b.append(StringUtils.substring(nextCode.getDisplay(), 0, 20).replace("[", "").replace("]", "").trim());
|
||||
b.append("] ");
|
||||
}
|
||||
ourLog.info(b.toString(), theConcept.getCode());
|
||||
childIter.remove();
|
||||
nextChild.getParents().remove(next);
|
||||
|
||||
} else {
|
||||
dropCircularRefs(nextChild, theChain, theCode2concept, theCircularCounter);
|
||||
}
|
||||
}
|
||||
theChain.remove(theChain.size() - 1);
|
||||
|
||||
}
|
||||
private ITermDeferredStorageSvc myDeferredStorageSvc;
|
||||
@Autowired
|
||||
private ITermCodeSystemStorageSvc myCodeSystemStorageSvc;
|
||||
|
||||
@Override
|
||||
public UploadStatistics loadImgthla(List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
|
||||
LoadedFileDescriptors descriptors = null;
|
||||
try {
|
||||
descriptors = new LoadedFileDescriptors(theFiles);
|
||||
try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
|
||||
List<String> mandatoryFilenameFragments = Arrays.asList(
|
||||
IMGTHLA_HLA_NOM_TXT,
|
||||
IMGTHLA_HLA_XML
|
||||
|
@ -134,15 +97,13 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
ourLog.info("Beginning IMGTHLA processing");
|
||||
|
||||
return processImgthlaFiles(descriptors, theRequestDetails);
|
||||
} finally {
|
||||
IOUtils.closeQuietly(descriptors);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public UploadStatistics loadLoinc(List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
|
||||
try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
|
||||
List<String> loincUploadPropertiesFragment = Arrays.asList(
|
||||
List<String> loincUploadPropertiesFragment = Collections.singletonList(
|
||||
LOINC_UPLOAD_PROPERTIES_FILE.getCode()
|
||||
);
|
||||
descriptors.verifyMandatoryFilesExist(loincUploadPropertiesFragment);
|
||||
|
@ -180,23 +141,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
}
|
||||
}
|
||||
|
||||
@NotNull
|
||||
private Properties getProperties(LoadedFileDescriptors theDescriptors, String thePropertiesFile) {
|
||||
Properties retVal = new Properties();
|
||||
for (FileDescriptor next : theDescriptors.getUncompressedFileDescriptors()) {
|
||||
if (next.getFilename().endsWith(thePropertiesFile)) {
|
||||
try {
|
||||
try (InputStream inputStream = next.getInputStream()) {
|
||||
retVal.load(inputStream);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException("Failed to read " + thePropertiesFile, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public UploadStatistics loadSnomedCt(List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
|
||||
try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
|
||||
|
@ -216,8 +160,6 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
@Override
|
||||
public UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
|
||||
try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
|
||||
IRecordHandler handler;
|
||||
|
||||
Optional<String> codeSystemContent = loadFile(descriptors, CUSTOM_CODESYSTEM_JSON, CUSTOM_CODESYSTEM_XML);
|
||||
CodeSystem codeSystem;
|
||||
if (codeSystemContent.isPresent()) {
|
||||
|
@ -233,14 +175,83 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
codeSystem.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
|
||||
}
|
||||
|
||||
TermCodeSystemVersion csv = new TermCodeSystemVersion();
|
||||
final Map<String, TermConcept> code2concept = processCustomTerminologyFiles(descriptors, csv);
|
||||
CustomTerminologySet terminologySet = CustomTerminologySet.load(descriptors, false);
|
||||
TermCodeSystemVersion csv = terminologySet.toCodeSystemVersion();
|
||||
|
||||
IIdType target = storeCodeSystem(theRequestDetails, csv, codeSystem, null, null);
|
||||
return new UploadStatistics(code2concept.size(), target);
|
||||
return new UploadStatistics(terminologySet.getSize(), target);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public UploadStatistics loadDeltaAdd(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
|
||||
ourLog.info("Processing terminology delta ADD for system[{}] with files: {}", theSystem, theFiles.stream().map(t -> t.getFilename()).collect(Collectors.toList()));
|
||||
try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
|
||||
CustomTerminologySet terminologySet = CustomTerminologySet.load(descriptors, false);
|
||||
return myCodeSystemStorageSvc.applyDeltaCodeSystemsAdd(theSystem, terminologySet);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public UploadStatistics loadDeltaRemove(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) {
|
||||
ourLog.info("Processing terminology delta REMOVE for system[{}] with files: {}", theSystem, theFiles.stream().map(t -> t.getFilename()).collect(Collectors.toList()));
|
||||
try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) {
|
||||
CustomTerminologySet terminologySet = CustomTerminologySet.load(descriptors, true);
|
||||
return myCodeSystemStorageSvc.applyDeltaCodeSystemsRemove(theSystem, terminologySet);
|
||||
}
|
||||
}
|
||||
|
||||
private void dropCircularRefs(TermConcept theConcept, ArrayList<String> theChain, Map<String, TermConcept> theCode2concept) {
|
||||
|
||||
theChain.add(theConcept.getCode());
|
||||
for (Iterator<TermConceptParentChildLink> childIter = theConcept.getChildren().iterator(); childIter.hasNext(); ) {
|
||||
TermConceptParentChildLink next = childIter.next();
|
||||
TermConcept nextChild = next.getChild();
|
||||
if (theChain.contains(nextChild.getCode())) {
|
||||
|
||||
StringBuilder b = new StringBuilder();
|
||||
b.append("Removing circular reference code ");
|
||||
b.append(nextChild.getCode());
|
||||
b.append(" from parent ");
|
||||
b.append(next.getParent().getCode());
|
||||
b.append(". Chain was: ");
|
||||
for (String nextInChain : theChain) {
|
||||
TermConcept nextCode = theCode2concept.get(nextInChain);
|
||||
b.append(nextCode.getCode());
|
||||
b.append('[');
|
||||
b.append(StringUtils.substring(nextCode.getDisplay(), 0, 20).replace("[", "").replace("]", "").trim());
|
||||
b.append("] ");
|
||||
}
|
||||
ourLog.info(b.toString(), theConcept.getCode());
|
||||
childIter.remove();
|
||||
nextChild.getParents().remove(next);
|
||||
|
||||
} else {
|
||||
dropCircularRefs(nextChild, theChain, theCode2concept);
|
||||
}
|
||||
}
|
||||
theChain.remove(theChain.size() - 1);
|
||||
|
||||
}
|
||||
|
||||
@NotNull
|
||||
private Properties getProperties(LoadedFileDescriptors theDescriptors, String thePropertiesFile) {
|
||||
Properties retVal = new Properties();
|
||||
for (FileDescriptor next : theDescriptors.getUncompressedFileDescriptors()) {
|
||||
if (next.getFilename().endsWith(thePropertiesFile)) {
|
||||
try {
|
||||
try (InputStream inputStream = next.getInputStream()) {
|
||||
retVal.load(inputStream);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException("Failed to read " + thePropertiesFile, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private Optional<String> loadFile(LoadedFileDescriptors theDescriptors, String... theFilenames) {
|
||||
for (FileDescriptor next : theDescriptors.getUncompressedFileDescriptors()) {
|
||||
for (String nextFilename : theFilenames) {
|
||||
|
@ -257,15 +268,14 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
return Optional.empty();
|
||||
}
|
||||
|
||||
UploadStatistics processImgthlaFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) {
|
||||
private UploadStatistics processImgthlaFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) {
|
||||
final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
|
||||
final Map<String, TermConcept> code2concept = new HashMap<>();
|
||||
final List<ValueSet> valueSets = new ArrayList<>();
|
||||
final List<ConceptMap> conceptMaps = new ArrayList<>();
|
||||
|
||||
CodeSystem imgthlaCs;
|
||||
try {
|
||||
String imgthlaCsString = IOUtils.toString(BaseHapiTerminologySvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/imgthla/imgthla.xml"), Charsets.UTF_8);
|
||||
String imgthlaCsString = IOUtils.toString(BaseTermReadSvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/imgthla/imgthla.xml"), Charsets.UTF_8);
|
||||
imgthlaCs = FhirContext.forR4().newXmlParser().parseResource(CodeSystem.class, imgthlaCsString);
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException("Failed to load imgthla.xml", e);
|
||||
|
@ -353,7 +363,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
|
||||
int valueSetCount = valueSets.size();
|
||||
int rootConceptCount = codeSystemVersion.getConcepts().size();
|
||||
int conceptCount = code2concept.size();
|
||||
int conceptCount = rootConceptCount;
|
||||
ourLog.info("Have {} total concepts, {} root concepts, {} ValueSets", conceptCount, rootConceptCount, valueSetCount);
|
||||
|
||||
// remove this when fully implemented ...
|
||||
|
@ -372,7 +382,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
|
||||
CodeSystem loincCs;
|
||||
try {
|
||||
String loincCsString = IOUtils.toString(BaseHapiTerminologySvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/loinc/loinc.xml"), Charsets.UTF_8);
|
||||
String loincCsString = IOUtils.toString(BaseTermReadSvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/loinc/loinc.xml"), Charsets.UTF_8);
|
||||
loincCs = FhirContext.forR4().newXmlParser().parseResource(CodeSystem.class, loincCsString);
|
||||
} catch (IOException e) {
|
||||
throw new InternalErrorException("Failed to load loinc.xml", e);
|
||||
|
@ -501,7 +511,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
retVal.setPublisher("Regenstrief Institute, Inc.");
|
||||
retVal.setDescription("A value set that includes all LOINC codes");
|
||||
retVal.setCopyright("This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/");
|
||||
retVal.getCompose().addInclude().setSystem(IHapiTerminologyLoaderSvc.LOINC_URI);
|
||||
retVal.getCompose().addInclude().setSystem(ITermLoaderSvc.LOINC_URI);
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
@ -540,7 +550,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
long count = circularCounter.getThenAdd();
|
||||
float pct = ((float) count / rootConcepts.size()) * 100.0f;
|
||||
ourLog.info(" * Scanning for circular refs - have scanned {} / {} codes ({}%)", count, rootConcepts.size(), pct);
|
||||
dropCircularRefs(next, new ArrayList<>(), code2concept, circularCounter);
|
||||
dropCircularRefs(next, new ArrayList<>(), code2concept);
|
||||
}
|
||||
|
||||
codeSystemVersion.getConcepts().addAll(rootConcepts.values());
|
||||
|
@ -555,8 +565,13 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setTermSvcForUnitTests(IHapiTerminologySvc theTermSvc) {
|
||||
myTermSvc = theTermSvc;
|
||||
void setTermDeferredStorageSvc(ITermDeferredStorageSvc theDeferredStorageSvc) {
|
||||
myDeferredStorageSvc = theDeferredStorageSvc;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setTermCodeSystemStorageSvcForUnitTests(ITermCodeSystemStorageSvc theTermCodeSystemStorageSvc) {
|
||||
myCodeSystemStorageSvc = theTermCodeSystemStorageSvc;
|
||||
}
|
||||
|
||||
private IIdType storeCodeSystem(RequestDetails theRequestDetails, final TermCodeSystemVersion theCodeSystemVersion, CodeSystem theCodeSystem, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps) {
|
||||
|
@ -566,135 +581,14 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
List<ConceptMap> conceptMaps = ObjectUtils.defaultIfNull(theConceptMaps, Collections.emptyList());
|
||||
|
||||
IIdType retVal;
|
||||
myTermSvc.setProcessDeferred(false);
|
||||
retVal = myTermSvc.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps);
|
||||
myTermSvc.setProcessDeferred(true);
|
||||
myDeferredStorageSvc.setProcessDeferred(false);
|
||||
retVal = myCodeSystemStorageSvc.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps);
|
||||
myDeferredStorageSvc.setProcessDeferred(true);
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
public static class LoadedFileDescriptors implements Closeable {
|
||||
|
||||
private List<File> myTemporaryFiles = new ArrayList<>();
|
||||
private List<IHapiTerminologyLoaderSvc.FileDescriptor> myUncompressedFileDescriptors = new ArrayList<>();
|
||||
|
||||
public LoadedFileDescriptors(List<IHapiTerminologyLoaderSvc.FileDescriptor> theFileDescriptors) {
|
||||
try {
|
||||
for (FileDescriptor next : theFileDescriptors) {
|
||||
if (next.getFilename().toLowerCase().endsWith(".zip")) {
|
||||
ourLog.info("Uncompressing {} into temporary files", next.getFilename());
|
||||
try (InputStream inputStream = next.getInputStream()) {
|
||||
ZipInputStream zis = new ZipInputStream(new BufferedInputStream(inputStream));
|
||||
for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null; ) {
|
||||
BOMInputStream fis = new BOMInputStream(zis);
|
||||
File nextTemporaryFile = File.createTempFile("hapifhir", ".tmp");
|
||||
nextTemporaryFile.deleteOnExit();
|
||||
FileOutputStream fos = new FileOutputStream(nextTemporaryFile, false);
|
||||
IOUtils.copy(fis, fos);
|
||||
String nextEntryFileName = nextEntry.getName();
|
||||
myUncompressedFileDescriptors.add(new FileDescriptor() {
|
||||
@Override
|
||||
public String getFilename() {
|
||||
return nextEntryFileName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream getInputStream() {
|
||||
try {
|
||||
return new FileInputStream(nextTemporaryFile);
|
||||
} catch (FileNotFoundException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
}
|
||||
});
|
||||
myTemporaryFiles.add(nextTemporaryFile);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
myUncompressedFileDescriptors.add(next);
|
||||
}
|
||||
|
||||
}
|
||||
} catch (Exception e) {
|
||||
close();
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
}
|
||||
|
||||
boolean hasFile(String theFilename) {
|
||||
return myUncompressedFileDescriptors
|
||||
.stream()
|
||||
.map(t -> t.getFilename().replaceAll(".*[\\\\/]", "")) // Strip the path from the filename
|
||||
.anyMatch(t -> t.equals(theFilename));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
for (File next : myTemporaryFiles) {
|
||||
FileUtils.deleteQuietly(next);
|
||||
}
|
||||
}
|
||||
|
||||
List<IHapiTerminologyLoaderSvc.FileDescriptor> getUncompressedFileDescriptors() {
|
||||
return myUncompressedFileDescriptors;
|
||||
}
|
||||
|
||||
private List<String> notFound(List<String> theExpectedFilenameFragments) {
|
||||
Set<String> foundFragments = new HashSet<>();
|
||||
for (String nextExpected : theExpectedFilenameFragments) {
|
||||
for (FileDescriptor next : myUncompressedFileDescriptors) {
|
||||
if (next.getFilename().contains(nextExpected)) {
|
||||
foundFragments.add(nextExpected);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ArrayList<String> notFoundFileNameFragments = new ArrayList<>(theExpectedFilenameFragments);
|
||||
notFoundFileNameFragments.removeAll(foundFragments);
|
||||
return notFoundFileNameFragments;
|
||||
}
|
||||
|
||||
private void verifyMandatoryFilesExist(List<String> theExpectedFilenameFragments) {
|
||||
List<String> notFound = notFound(theExpectedFilenameFragments);
|
||||
if (!notFound.isEmpty()) {
|
||||
throw new UnprocessableEntityException("Could not find the following mandatory files in input: " + notFound);
|
||||
}
|
||||
}
|
||||
|
||||
private void verifyOptionalFilesExist(List<String> theExpectedFilenameFragments) {
|
||||
List<String> notFound = notFound(theExpectedFilenameFragments);
|
||||
if (!notFound.isEmpty()) {
|
||||
ourLog.warn("Could not find the following optional files: " + notFound);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public static Map<String, TermConcept> processCustomTerminologyFiles(LoadedFileDescriptors theDescriptors, TermCodeSystemVersion theCsv) {
|
||||
IRecordHandler handler;// Concept File
|
||||
final Map<String, TermConcept> code2concept = new HashMap<>();
|
||||
handler = new ConceptHandler(code2concept, theCsv);
|
||||
iterateOverZipFile(theDescriptors, CUSTOM_CONCEPTS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
|
||||
// Hierarchy
|
||||
if (theDescriptors.hasFile(CUSTOM_HIERARCHY_FILE)) {
|
||||
handler = new HierarchyHandler(code2concept);
|
||||
iterateOverZipFile(theDescriptors, CUSTOM_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
}
|
||||
|
||||
// Add root concepts to CodeSystemVersion
|
||||
for (TermConcept nextConcept : code2concept.values()) {
|
||||
if (nextConcept.getParents().isEmpty()) {
|
||||
theCsv.getConcepts().add(nextConcept);
|
||||
}
|
||||
}
|
||||
return code2concept;
|
||||
}
|
||||
|
||||
private static void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) {
|
||||
public static void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) {
|
||||
|
||||
boolean foundMatch = false;
|
||||
for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) {
|
||||
|
@ -748,9 +642,12 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
}
|
||||
|
||||
@Nonnull
|
||||
public static CSVParser newCsvRecords(char theDelimiter, QuoteMode theQuoteMode, Reader theReader) throws IOException {
|
||||
private static CSVParser newCsvRecords(char theDelimiter, QuoteMode theQuoteMode, Reader theReader) throws IOException {
|
||||
CSVParser parsed;
|
||||
CSVFormat format = CSVFormat.newFormat(theDelimiter).withFirstRecordAsHeader();
|
||||
CSVFormat format = CSVFormat
|
||||
.newFormat(theDelimiter)
|
||||
.withFirstRecordAsHeader()
|
||||
.withTrim();
|
||||
if (theQuoteMode != null) {
|
||||
format = format.withQuote('"').withQuoteMode(theQuoteMode);
|
||||
}
|
||||
|
@ -769,12 +666,11 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
public static TermConcept getOrCreateConcept(TermCodeSystemVersion codeSystemVersion, Map<String, TermConcept> id2concept, String id) {
|
||||
public static TermConcept getOrCreateConcept(Map<String, TermConcept> id2concept, String id) {
|
||||
TermConcept concept = id2concept.get(id);
|
||||
if (concept == null) {
|
||||
concept = new TermConcept();
|
||||
id2concept.put(id, concept);
|
||||
concept.setCodeSystemVersion(codeSystemVersion);
|
||||
}
|
||||
return concept;
|
||||
}
|
|
@ -25,9 +25,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
|||
import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
|
||||
import org.hl7.fhir.instance.model.api.IBaseDatatype;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
|
@ -36,7 +34,7 @@ import java.util.List;
|
|||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl {
|
||||
public class TermReadSvcDstu2 extends BaseTermReadSvcImpl {
|
||||
|
||||
@Autowired
|
||||
private IValidationSupport myValidationSupport;
|
||||
|
@ -65,22 +63,7 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected IIdType createOrUpdateCodeSystem(CodeSystem theCodeSystemResource) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createOrUpdateConceptMap(ConceptMap theNextConceptMap) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createOrUpdateValueSet(ValueSet theValueSet) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CodeSystem getCodeSystemFromContext(String theSystem) {
|
||||
public CodeSystem getCodeSystemFromContext(String theSystem) {
|
||||
return null;
|
||||
}
|
||||
|
|
@ -2,13 +2,13 @@ package ca.uhn.fhir.jpa.term;
|
|||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvcDstu3;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.util.CoverageIgnore;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import org.hl7.fhir.convertors.VersionConvertor_30_40;
|
||||
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
|
||||
|
@ -19,7 +19,6 @@ import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionComponent;
|
|||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
import org.hl7.fhir.instance.model.api.IBaseDatatype;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
|
@ -32,7 +31,6 @@ import java.util.Collections;
|
|||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
/*
|
||||
|
@ -55,27 +53,22 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implements IValidationSupport, IHapiTerminologySvcDstu3 {
|
||||
public class TermReadSvcDstu3 extends BaseTermReadSvcImpl implements IValidationSupport, ITermReadSvcDstu3 {
|
||||
|
||||
@Autowired
|
||||
@Qualifier("myValueSetDaoDstu3")
|
||||
private IFhirResourceDao<ValueSet> myValueSetResourceDao;
|
||||
@Autowired
|
||||
@Qualifier("myConceptMapDaoDstu3")
|
||||
private IFhirResourceDao<ConceptMap> myConceptMapResourceDao;
|
||||
@Autowired
|
||||
private IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> myCodeSystemResourceDao;
|
||||
@Autowired
|
||||
private IValidationSupport myValidationSupport;
|
||||
@Autowired
|
||||
private IHapiTerminologySvc myTerminologySvc;
|
||||
private ITermReadSvc myTerminologySvc;
|
||||
@Autowired
|
||||
private PlatformTransactionManager myTransactionManager;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
public HapiTerminologySvcDstu3() {
|
||||
public TermReadSvcDstu3() {
|
||||
super();
|
||||
}
|
||||
|
||||
|
@ -102,55 +95,6 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implemen
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) {
|
||||
CodeSystem resourceToStore;
|
||||
try {
|
||||
resourceToStore = VersionConvertor_30_40.convertCodeSystem(theCodeSystemResource);
|
||||
} catch (FHIRException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
validateCodeSystemForStorage(theCodeSystemResource);
|
||||
if (isBlank(resourceToStore.getIdElement().getIdPart())) {
|
||||
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
|
||||
return myCodeSystemResourceDao.update(resourceToStore, matchUrl).getId();
|
||||
} else {
|
||||
return myCodeSystemResourceDao.update(resourceToStore).getId();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) {
|
||||
ConceptMap resourceToStore;
|
||||
try {
|
||||
resourceToStore = VersionConvertor_30_40.convertConceptMap(theConceptMap);
|
||||
} catch (FHIRException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
if (isBlank(resourceToStore.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl());
|
||||
myConceptMapResourceDao.update(resourceToStore, matchUrl);
|
||||
} else {
|
||||
myConceptMapResourceDao.update(resourceToStore);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) {
|
||||
ValueSet valueSetDstu3;
|
||||
try {
|
||||
valueSetDstu3 = VersionConvertor_30_40.convertValueSet(theValueSet);
|
||||
} catch (FHIRException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
|
||||
if (isBlank(valueSetDstu3.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
|
||||
myValueSetResourceDao.update(valueSetDstu3, matchUrl);
|
||||
} else {
|
||||
myValueSetResourceDao.update(valueSetDstu3);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueSetExpansionComponent expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
|
||||
|
@ -303,7 +247,7 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implemen
|
|||
}
|
||||
|
||||
@Override
|
||||
protected org.hl7.fhir.r4.model.CodeSystem getCodeSystemFromContext(String theSystem) {
|
||||
public org.hl7.fhir.r4.model.CodeSystem getCodeSystemFromContext(String theSystem) {
|
||||
CodeSystem codeSystem = myValidationSupport.fetchCodeSystem(myContext, theSystem);
|
||||
try {
|
||||
return VersionConvertor_30_40.convertCodeSystem(codeSystem);
|
|
@ -5,11 +5,10 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
|
|||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvcR4;
|
||||
import ca.uhn.fhir.util.CoverageIgnore;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import org.hl7.fhir.instance.model.api.IBaseDatatype;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
|
||||
import org.hl7.fhir.r4.model.*;
|
||||
import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
|
||||
|
@ -27,7 +26,6 @@ import java.util.Collections;
|
|||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
/*
|
||||
|
@ -50,14 +48,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements IHapiTerminologySvcR4 {
|
||||
public class TermReadSvcR4 extends BaseTermReadSvcImpl implements ITermReadSvcR4 {
|
||||
|
||||
@Autowired
|
||||
@Qualifier("myConceptMapDaoR4")
|
||||
private IFhirResourceDao<ConceptMap> myConceptMapResourceDao;
|
||||
@Autowired
|
||||
@Qualifier("myCodeSystemDaoR4")
|
||||
private IFhirResourceDao<CodeSystem> myCodeSystemResourceDao;
|
||||
@Autowired
|
||||
@Qualifier("myValueSetDaoR4")
|
||||
private IFhirResourceDao<ValueSet> myValueSetResourceDao;
|
||||
|
@ -89,36 +81,6 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) {
|
||||
validateCodeSystemForStorage(theCodeSystemResource);
|
||||
if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) {
|
||||
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
|
||||
return myCodeSystemResourceDao.update(theCodeSystemResource, matchUrl).getId();
|
||||
} else {
|
||||
return myCodeSystemResourceDao.update(theCodeSystemResource).getId();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) {
|
||||
if (isBlank(theConceptMap.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl());
|
||||
myConceptMapResourceDao.update(theConceptMap, matchUrl);
|
||||
} else {
|
||||
myConceptMapResourceDao.update(theConceptMap);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) {
|
||||
if (isBlank(theValueSet.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
|
||||
myValueSetResourceDao.update(theValueSet, matchUrl);
|
||||
} else {
|
||||
myValueSetResourceDao.update(theValueSet);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
|
||||
|
@ -233,7 +195,7 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements
|
|||
}
|
||||
|
||||
@Override
|
||||
protected CodeSystem getCodeSystemFromContext(String theSystem) {
|
||||
public CodeSystem getCodeSystemFromContext(String theSystem) {
|
||||
return myValidationSupport.fetchCodeSystem(myContext, theSystem);
|
||||
}
|
||||
|
|
@ -5,12 +5,11 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
|
|||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvcR5;
|
||||
import ca.uhn.fhir.util.CoverageIgnore;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import org.hl7.fhir.instance.model.api.IBaseDatatype;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r5.hapi.ctx.IValidationSupport;
|
||||
import org.hl7.fhir.r5.model.*;
|
||||
import org.hl7.fhir.r5.model.CodeSystem.ConceptDefinitionComponent;
|
||||
|
@ -28,7 +27,6 @@ import java.util.Collections;
|
|||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
/*
|
||||
|
@ -51,14 +49,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
public class HapiTerminologySvcR5 extends BaseHapiTerminologySvcImpl implements IValidationSupport, IHapiTerminologySvcR5 {
|
||||
public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSupport, ITermReadSvcR5 {
|
||||
|
||||
@Autowired
|
||||
@Qualifier("myConceptMapDaoR5")
|
||||
private IFhirResourceDao<ConceptMap> myConceptMapResourceDao;
|
||||
@Autowired
|
||||
@Qualifier("myCodeSystemDaoR5")
|
||||
private IFhirResourceDao<CodeSystem> myCodeSystemResourceDao;
|
||||
@Autowired
|
||||
@Qualifier("myValueSetDaoR5")
|
||||
private IFhirResourceDao<ValueSet> myValueSetResourceDao;
|
||||
|
@ -90,44 +82,6 @@ public class HapiTerminologySvcR5 extends BaseHapiTerminologySvcImpl implements
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) {
|
||||
validateCodeSystemForStorage(theCodeSystemResource);
|
||||
|
||||
CodeSystem codeSystemR4 = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(theCodeSystemResource);
|
||||
if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) {
|
||||
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
|
||||
return myCodeSystemResourceDao.update(codeSystemR4, matchUrl).getId();
|
||||
} else {
|
||||
return myCodeSystemResourceDao.update(codeSystemR4).getId();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) {
|
||||
|
||||
ConceptMap conceptMapR4 = org.hl7.fhir.convertors.conv40_50.ConceptMap.convertConceptMap(theConceptMap);
|
||||
|
||||
if (isBlank(theConceptMap.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl());
|
||||
myConceptMapResourceDao.update(conceptMapR4, matchUrl);
|
||||
} else {
|
||||
myConceptMapResourceDao.update(conceptMapR4);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) {
|
||||
|
||||
ValueSet valueSetR4 = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(theValueSet);
|
||||
|
||||
if (isBlank(theValueSet.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
|
||||
myValueSetResourceDao.update(valueSetR4, matchUrl);
|
||||
} else {
|
||||
myValueSetResourceDao.update(valueSetR4);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
|
||||
|
@ -244,7 +198,7 @@ public class HapiTerminologySvcR5 extends BaseHapiTerminologySvcImpl implements
|
|||
}
|
||||
|
||||
@Override
|
||||
protected org.hl7.fhir.r4.model.CodeSystem getCodeSystemFromContext(String theSystem) {
|
||||
public org.hl7.fhir.r4.model.CodeSystem getCodeSystemFromContext(String theSystem) {
|
||||
CodeSystem codeSystemR5 = myValidationSupport.fetchCodeSystem(myContext, theSystem);
|
||||
return org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(codeSystemR5);
|
||||
}
|
|
@ -0,0 +1,169 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
|
||||
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
|
||||
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReindexingSvc;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.quartz.JobExecutionContext;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
|
||||
public class TermReindexingSvcImpl implements ITermReindexingSvc {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(TermReindexingSvcImpl.class);
|
||||
private static final long SCHEDULE_INTERVAL_MILLIS = DateUtils.MILLIS_PER_MINUTE;
|
||||
private static boolean ourForceSaveDeferredAlwaysForUnitTest;
|
||||
@Autowired
|
||||
protected ITermConceptDao myConceptDao;
|
||||
private ArrayListMultimap<Long, Long> myChildToParentPidCache;
|
||||
@Autowired
|
||||
private PlatformTransactionManager myTransactionMgr;
|
||||
@Autowired
|
||||
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;
|
||||
@Autowired
|
||||
private ITermCodeSystemStorageSvc myConceptStorageSvc;
|
||||
@Autowired
|
||||
private ITermDeferredStorageSvc myDeferredStorageSvc;
|
||||
@Autowired
|
||||
private ISchedulerService mySchedulerService;
|
||||
|
||||
@Override
|
||||
public void processReindexing() {
|
||||
if (myDeferredStorageSvc.isStorageQueueEmpty() == false && !ourForceSaveDeferredAlwaysForUnitTest) {
|
||||
return;
|
||||
}
|
||||
|
||||
TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
|
||||
tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
|
||||
tt.execute(new TransactionCallbackWithoutResult() {
|
||||
private void createParentsString(StringBuilder theParentsBuilder, Long theConceptPid) {
|
||||
Validate.notNull(theConceptPid, "theConceptPid must not be null");
|
||||
List<Long> parents = myChildToParentPidCache.get(theConceptPid);
|
||||
if (parents.contains(-1L)) {
|
||||
return;
|
||||
} else if (parents.isEmpty()) {
|
||||
Collection<Long> parentLinks = myConceptParentChildLinkDao.findAllWithChild(theConceptPid);
|
||||
if (parentLinks.isEmpty()) {
|
||||
myChildToParentPidCache.put(theConceptPid, -1L);
|
||||
ourLog.info("Found {} parent concepts of concept {} (cache has {})", 0, theConceptPid, myChildToParentPidCache.size());
|
||||
return;
|
||||
} else {
|
||||
for (Long next : parentLinks) {
|
||||
myChildToParentPidCache.put(theConceptPid, next);
|
||||
}
|
||||
int parentCount = myChildToParentPidCache.get(theConceptPid).size();
|
||||
ourLog.info("Found {} parent concepts of concept {} (cache has {})", parentCount, theConceptPid, myChildToParentPidCache.size());
|
||||
}
|
||||
}
|
||||
|
||||
for (Long nextParent : parents) {
|
||||
if (theParentsBuilder.length() > 0) {
|
||||
theParentsBuilder.append(' ');
|
||||
}
|
||||
theParentsBuilder.append(nextParent);
|
||||
createParentsString(theParentsBuilder, nextParent);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
int maxResult = 1000;
|
||||
Page<TermConcept> concepts = myConceptDao.findResourcesRequiringReindexing(PageRequest.of(0, maxResult));
|
||||
if (!concepts.hasContent()) {
|
||||
if (myChildToParentPidCache != null) {
|
||||
ourLog.info("Clearing parent concept cache");
|
||||
myChildToParentPidCache = null;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (myChildToParentPidCache == null) {
|
||||
myChildToParentPidCache = ArrayListMultimap.create();
|
||||
}
|
||||
|
||||
ourLog.info("Indexing {} / {} concepts", concepts.getContent().size(), concepts.getTotalElements());
|
||||
|
||||
int count = 0;
|
||||
StopWatch stopwatch = new StopWatch();
|
||||
|
||||
for (TermConcept nextConcept : concepts) {
|
||||
|
||||
if (isBlank(nextConcept.getParentPidsAsString())) {
|
||||
StringBuilder parentsBuilder = new StringBuilder();
|
||||
createParentsString(parentsBuilder, nextConcept.getId());
|
||||
nextConcept.setParentPids(parentsBuilder.toString());
|
||||
}
|
||||
|
||||
myConceptStorageSvc.saveConcept(nextConcept);
|
||||
count++;
|
||||
}
|
||||
|
||||
ourLog.info("Indexed {} / {} concepts in {}ms - Avg {}ms / resource", count, concepts.getContent().size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(count));
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
public void registerScheduledJob() {
|
||||
// Register scheduled job to save deferred concepts
|
||||
// In the future it would be great to make this a cluster-aware task somehow
|
||||
ScheduledJobDefinition jobDefinition = new ScheduledJobDefinition();
|
||||
jobDefinition.setId(TermReindexingSvcImpl.class.getName() + "_reindex");
|
||||
jobDefinition.setJobClass(SaveDeferredJob.class);
|
||||
mySchedulerService.scheduleFixedDelay(SCHEDULE_INTERVAL_MILLIS, false, jobDefinition);
|
||||
}
|
||||
|
||||
public static class SaveDeferredJob extends FireAtIntervalJob {
|
||||
|
||||
@Autowired
|
||||
private ITermDeferredStorageSvc myTerminologySvc;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
public SaveDeferredJob() {
|
||||
super(SCHEDULE_INTERVAL_MILLIS);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doExecute(JobExecutionContext theContext) {
|
||||
myTerminologySvc.saveDeferred();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is present only for unit tests, do not call from client code
|
||||
*/
|
||||
@VisibleForTesting
|
||||
public static void setForceSaveDeferredAlwaysForUnitTest(boolean theForceSaveDeferredAlwaysForUnitTest) {
|
||||
ourForceSaveDeferredAlwaysForUnitTest = theForceSaveDeferredAlwaysForUnitTest;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,26 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
||||
public class TermVersionAdapterSvcDstu2 implements ITermVersionAdapterSvc {
|
||||
|
||||
@Override
|
||||
public IIdType createOrUpdateCodeSystem(CodeSystem theCodeSystemResource) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createOrUpdateConceptMap(ConceptMap theNextConceptMap) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createOrUpdateValueSet(ValueSet theValueSet) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,101 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import org.hl7.fhir.convertors.VersionConvertor_30_40;
|
||||
import org.hl7.fhir.dstu3.model.CodeSystem;
|
||||
import org.hl7.fhir.dstu3.model.ConceptMap;
|
||||
import org.hl7.fhir.dstu3.model.ValueSet;
|
||||
import org.hl7.fhir.exceptions.FHIRException;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.event.ContextRefreshedEvent;
|
||||
import org.springframework.context.event.ContextStartedEvent;
|
||||
import org.springframework.context.event.EventListener;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
|
||||
public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl implements ITermVersionAdapterSvc {
|
||||
|
||||
private IFhirResourceDao<ConceptMap> myConceptMapResourceDao;
|
||||
private IFhirResourceDao<CodeSystem> myCodeSystemResourceDao;
|
||||
private IFhirResourceDao<ValueSet> myValueSetResourceDao;
|
||||
|
||||
@Autowired
|
||||
private ApplicationContext myAppCtx;
|
||||
|
||||
public TermVersionAdapterSvcDstu3() {
|
||||
super();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Initialize the beans that are used by this service.
|
||||
*
|
||||
* Note: There is a circular dependency here where the CodeSystem DAO
|
||||
* needs terminology services, and the term services need the CodeSystem DAO.
|
||||
* So we look these up in a refresh event instead of just autowiring them
|
||||
* in order to avoid weird circular reference errors.
|
||||
*/
|
||||
@SuppressWarnings({"unchecked", "unused"})
|
||||
@EventListener
|
||||
public void start(ContextRefreshedEvent theEvent) {
|
||||
myCodeSystemResourceDao = (IFhirResourceDao<CodeSystem>) myAppCtx.getBean("myCodeSystemDaoDstu3");
|
||||
myValueSetResourceDao = (IFhirResourceDao<ValueSet>) myAppCtx.getBean("myValueSetDaoDstu3");
|
||||
myConceptMapResourceDao = (IFhirResourceDao<ConceptMap>) myAppCtx.getBean("myConceptMapDaoDstu3");
|
||||
}
|
||||
|
||||
@Override
|
||||
public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) {
|
||||
CodeSystem resourceToStore;
|
||||
try {
|
||||
resourceToStore = VersionConvertor_30_40.convertCodeSystem(theCodeSystemResource);
|
||||
} catch (FHIRException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
validateCodeSystemForStorage(theCodeSystemResource);
|
||||
if (isBlank(resourceToStore.getIdElement().getIdPart())) {
|
||||
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
|
||||
return myCodeSystemResourceDao.update(resourceToStore, matchUrl).getId();
|
||||
} else {
|
||||
return myCodeSystemResourceDao.update(resourceToStore).getId();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) {
|
||||
ConceptMap resourceToStore;
|
||||
try {
|
||||
resourceToStore = VersionConvertor_30_40.convertConceptMap(theConceptMap);
|
||||
} catch (FHIRException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
if (isBlank(resourceToStore.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl());
|
||||
myConceptMapResourceDao.update(resourceToStore, matchUrl);
|
||||
} else {
|
||||
myConceptMapResourceDao.update(resourceToStore);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) {
|
||||
ValueSet valueSetDstu3;
|
||||
try {
|
||||
valueSetDstu3 = VersionConvertor_30_40.convertValueSet(theValueSet);
|
||||
} catch (FHIRException e) {
|
||||
throw new InternalErrorException(e);
|
||||
}
|
||||
|
||||
if (isBlank(valueSetDstu3.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
|
||||
myValueSetResourceDao.update(valueSetDstu3, matchUrl);
|
||||
} else {
|
||||
myValueSetResourceDao.update(valueSetDstu3);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,75 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.event.ContextRefreshedEvent;
|
||||
import org.springframework.context.event.EventListener;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
|
||||
public class TermVersionAdapterSvcR4 extends BaseTermVersionAdapterSvcImpl implements ITermVersionAdapterSvc {
|
||||
private IFhirResourceDao<ConceptMap> myConceptMapResourceDao;
|
||||
private IFhirResourceDao<CodeSystem> myCodeSystemResourceDao;
|
||||
private IFhirResourceDao<ValueSet> myValueSetResourceDao;
|
||||
|
||||
@Autowired
|
||||
private ApplicationContext myAppCtx;
|
||||
|
||||
/**
|
||||
* Initialize the beans that are used by this service.
|
||||
*
|
||||
* Note: There is a circular dependency here where the CodeSystem DAO
|
||||
* needs terminology services, and the term services need the CodeSystem DAO.
|
||||
* So we look these up in a refresh event instead of just autowiring them
|
||||
* in order to avoid weird circular reference errors.
|
||||
*/
|
||||
@SuppressWarnings({"unchecked", "unused"})
|
||||
@EventListener
|
||||
public void start(ContextRefreshedEvent theEvent) {
|
||||
myCodeSystemResourceDao = (IFhirResourceDao<CodeSystem>) myAppCtx.getBean("myCodeSystemDaoR4");
|
||||
myValueSetResourceDao = (IFhirResourceDao<ValueSet>) myAppCtx.getBean("myValueSetDaoR4");
|
||||
myConceptMapResourceDao = (IFhirResourceDao<ConceptMap>) myAppCtx.getBean("myConceptMapDaoR4");
|
||||
}
|
||||
|
||||
@Override
|
||||
public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) {
|
||||
validateCodeSystemForStorage(theCodeSystemResource);
|
||||
if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) {
|
||||
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
|
||||
return myCodeSystemResourceDao.update(theCodeSystemResource, matchUrl).getId();
|
||||
} else {
|
||||
return myCodeSystemResourceDao.update(theCodeSystemResource).getId();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) {
|
||||
if (isBlank(theConceptMap.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl());
|
||||
myConceptMapResourceDao.update(theConceptMap, matchUrl);
|
||||
} else {
|
||||
myConceptMapResourceDao.update(theConceptMap);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) {
|
||||
if (isBlank(theValueSet.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
|
||||
myValueSetResourceDao.update(theValueSet, matchUrl);
|
||||
} else {
|
||||
myValueSetResourceDao.update(theValueSet);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,80 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r5.model.CodeSystem;
|
||||
import org.hl7.fhir.r5.model.ConceptMap;
|
||||
import org.hl7.fhir.r5.model.ValueSet;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.event.ContextRefreshedEvent;
|
||||
import org.springframework.context.event.EventListener;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
|
||||
public class TermVersionAdapterSvcR5 extends BaseTermVersionAdapterSvcImpl implements ITermVersionAdapterSvc {
|
||||
private IFhirResourceDao<ConceptMap> myConceptMapResourceDao;
|
||||
private IFhirResourceDao<CodeSystem> myCodeSystemResourceDao;
|
||||
private IFhirResourceDao<ValueSet> myValueSetResourceDao;
|
||||
|
||||
@Autowired
|
||||
private ApplicationContext myAppCtx;
|
||||
|
||||
/**
|
||||
* Initialize the beans that are used by this service.
|
||||
*
|
||||
* Note: There is a circular dependency here where the CodeSystem DAO
|
||||
* needs terminology services, and the term services need the CodeSystem DAO.
|
||||
* So we look these up in a refresh event instead of just autowiring them
|
||||
* in order to avoid weird circular reference errors.
|
||||
*/
|
||||
@SuppressWarnings({"unchecked", "unused"})
|
||||
@EventListener
|
||||
public void start(ContextRefreshedEvent theEvent) {
|
||||
myCodeSystemResourceDao = (IFhirResourceDao<CodeSystem>) myAppCtx.getBean("myCodeSystemDaoR5");
|
||||
myValueSetResourceDao = (IFhirResourceDao<ValueSet>) myAppCtx.getBean("myValueSetDaoR5");
|
||||
myConceptMapResourceDao = (IFhirResourceDao<ConceptMap>) myAppCtx.getBean("myConceptMapDaoR5");
|
||||
}
|
||||
|
||||
@Override
|
||||
public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource) {
|
||||
validateCodeSystemForStorage(theCodeSystemResource);
|
||||
|
||||
CodeSystem codeSystemR4 = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(theCodeSystemResource);
|
||||
if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) {
|
||||
String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl());
|
||||
return myCodeSystemResourceDao.update(codeSystemR4, matchUrl).getId();
|
||||
} else {
|
||||
return myCodeSystemResourceDao.update(codeSystemR4).getId();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) {
|
||||
|
||||
ConceptMap conceptMapR4 = org.hl7.fhir.convertors.conv40_50.ConceptMap.convertConceptMap(theConceptMap);
|
||||
|
||||
if (isBlank(theConceptMap.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl());
|
||||
myConceptMapResourceDao.update(conceptMapR4, matchUrl);
|
||||
} else {
|
||||
myConceptMapResourceDao.update(conceptMapR4);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) {
|
||||
|
||||
ValueSet valueSetR4 = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(theValueSet);
|
||||
|
||||
if (isBlank(theValueSet.getIdElement().getIdPart())) {
|
||||
String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl());
|
||||
myValueSetResourceDao.update(valueSetR4, matchUrl);
|
||||
} else {
|
||||
myValueSetResourceDao.update(valueSetR4);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
||||
public class UploadStatistics {
|
||||
private final IIdType myTarget;
|
||||
private int myUpdatedConceptCount;
|
||||
|
||||
public UploadStatistics(IIdType theTarget) {
|
||||
this(0, theTarget);
|
||||
}
|
||||
|
||||
public UploadStatistics(int theUpdatedConceptCount, IIdType theTarget) {
|
||||
myUpdatedConceptCount = theUpdatedConceptCount;
|
||||
myTarget = theTarget;
|
||||
}
|
||||
|
||||
public void incrementUpdatedConceptCount() {
|
||||
myUpdatedConceptCount++;
|
||||
}
|
||||
|
||||
public int getUpdatedConceptCount() {
|
||||
return myUpdatedConceptCount;
|
||||
}
|
||||
|
||||
public IIdType getTarget() {
|
||||
return myTarget;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
package ca.uhn.fhir.jpa.term.api;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.UploadStatistics;
|
||||
import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * This service handles writes to the CodeSystem/Concept tables within the terminology services
 */
public interface ITermCodeSystemStorageSvc {

	/**
	 * Deletes the given code system and its associated data.
	 */
	void deleteCodeSystem(TermCodeSystem theCodeSystem);

	/**
	 * Stores a new version of a code system identified by its resource PID,
	 * canonical URI, name and version ID, backed by the given resource table entry.
	 */
	void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable);

	/**
	 * Stores a new code system version along with any associated ValueSets and
	 * ConceptMaps.
	 *
	 * @return Returns the ID of the created/updated code system
	 */
	IIdType storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<ValueSet> theValueSets, List<org.hl7.fhir.r4.model.ConceptMap> theConceptMaps);

	/**
	 * Stores a new code system version for the given resource entity if one is
	 * required (presumably a no-op otherwise - confirm against the implementation).
	 */
	void storeNewCodeSystemVersionIfNeeded(CodeSystem theCodeSystem, ResourceTable theResourceEntity);

	/**
	 * Applies an incremental "add" delta of custom terminology concepts to the
	 * code system identified by theSystem.
	 *
	 * @return statistics describing how many concepts were affected and the target resource
	 */
	UploadStatistics applyDeltaCodeSystemsAdd(String theSystem, CustomTerminologySet theAdditions);

	/**
	 * Applies an incremental "remove" delta of custom terminology concepts to the
	 * code system identified by theSystem.
	 *
	 * @return statistics describing how many concepts were affected and the target resource
	 */
	UploadStatistics applyDeltaCodeSystemsRemove(String theSystem, CustomTerminologySet theRemovals);

	/**
	 * Persists a single concept.
	 *
	 * @return presumably the number of saved entities - confirm against the implementation
	 */
	int saveConcept(TermConcept theNextConcept);

	/**
	 * Resolves the persistent ID of the ValueSet resource with the given ID.
	 */
	Long getValueSetResourcePid(IIdType theIdElement);
}
|
|
@ -0,0 +1,33 @@
|
|||
package ca.uhn.fhir.jpa.term.api;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * This service handles processing "deferred" concept writes, meaning concepts that have been
 * queued for storage because there are too many of them to handle in a single transaction.
 */
public interface ITermDeferredStorageSvc {

	/**
	 * Processes (a portion of) the queued deferred writes.
	 */
	void saveDeferred();

	/**
	 * Returns {@code true} when there is no deferred work waiting to be stored.
	 */
	boolean isStorageQueueEmpty();

	/**
	 * This is mostly for unit tests - we can disable processing of deferred concepts
	 * by changing this flag
	 */
	void setProcessDeferred(boolean theProcessDeferred);

	/**
	 * Queues a single concept for deferred storage.
	 */
	void addConceptToStorageQueue(TermConcept theConcept);

	/**
	 * Queues a single parent/child concept link for deferred storage.
	 */
	void addConceptLinkToStorageQueue(TermConceptParentChildLink theConceptLink);

	/**
	 * Queues ConceptMap resources for deferred storage.
	 */
	void addConceptMapsToStorageQueue(List<ConceptMap> theConceptMaps);

	/**
	 * Queues ValueSet resources for deferred storage.
	 */
	void addValueSetsToStorageQueue(List<ValueSet> theValueSets);
}
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
package ca.uhn.fhir.jpa.term.api;
|
||||
|
||||
/*
|
||||
* #%L
|
||||
|
@ -20,13 +20,18 @@ package ca.uhn.fhir.jpa.term;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.term.UploadStatistics;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
|
||||
public interface IHapiTerminologyLoaderSvc {
|
||||
/**
|
||||
* This service handles bulk loading concepts into the terminology service concept tables
|
||||
* using any of several predefined input formats
|
||||
*/
|
||||
public interface ITermLoaderSvc {
|
||||
|
||||
String IMGTHLA_URI = "http://www.ebi.ac.uk/ipd/imgt/hla";
|
||||
String LOINC_URI = "http://loinc.org";
|
||||
|
@ -39,9 +44,15 @@ public interface IHapiTerminologyLoaderSvc {
|
|||
|
||||
UploadStatistics loadSnomedCt(List<FileDescriptor> theFiles, RequestDetails theRequestDetails);
|
||||
|
||||
// FIXME: remove the default implementation before 4.0.0
|
||||
// FIXME: remove the default implementation before 4.1.0
|
||||
default UploadStatistics loadCustom(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) { return null; };
|
||||
|
||||
// FIXME: remove the default implementation before 4.1.0
|
||||
default UploadStatistics loadDeltaAdd(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) { return null; };
|
||||
|
||||
// FIXME: remove the default implementation before 4.1.0
|
||||
default UploadStatistics loadDeltaRemove(String theSystem, List<FileDescriptor> theFiles, RequestDetails theRequestDetails) { return null; };
|
||||
|
||||
interface FileDescriptor {
|
||||
|
||||
String getFilename();
|
||||
|
@ -50,23 +61,25 @@ public interface IHapiTerminologyLoaderSvc {
|
|||
|
||||
}
|
||||
|
||||
class UploadStatistics {
|
||||
private final int myConceptCount;
|
||||
private final IIdType myTarget;
|
||||
class ByteArrayFileDescriptor implements FileDescriptor {
|
||||
|
||||
public UploadStatistics(int theConceptCount, IIdType theTarget) {
|
||||
myConceptCount = theConceptCount;
|
||||
myTarget = theTarget;
|
||||
private final String myNextUrl;
|
||||
private final byte[] myNextData;
|
||||
|
||||
public ByteArrayFileDescriptor(String theNextUrl, byte[] theNextData) {
|
||||
myNextUrl = theNextUrl;
|
||||
myNextData = theNextData;
|
||||
}
|
||||
|
||||
public int getConceptCount() {
|
||||
return myConceptCount;
|
||||
@Override
|
||||
public String getFilename() {
|
||||
return myNextUrl;
|
||||
}
|
||||
|
||||
public IIdType getTarget() {
|
||||
return myTarget;
|
||||
@Override
|
||||
public InputStream getInputStream() {
|
||||
return new ByteArrayInputStream(myNextData);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -1,20 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
package ca.uhn.fhir.jpa.term.api;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
|
||||
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
|
||||
import ca.uhn.fhir.jpa.entity.*;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import org.hl7.fhir.instance.model.api.*;
|
||||
import ca.uhn.fhir.jpa.term.IValueSetConceptAccumulator;
|
||||
import ca.uhn.fhir.jpa.term.TranslationRequest;
|
||||
import ca.uhn.fhir.jpa.term.VersionIndependentConcept;
|
||||
import org.hl7.fhir.instance.model.api.IBaseCoding;
|
||||
import org.hl7.fhir.instance.model.api.IBaseDatatype;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
/*
|
||||
* #%L
|
||||
|
@ -36,9 +41,17 @@ import java.util.concurrent.atomic.AtomicInteger;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
public interface IHapiTerminologySvc {
|
||||
|
||||
void deleteCodeSystem(TermCodeSystem theCodeSystem);
|
||||
/**
|
||||
* This interface is the "read" interface for the terminology service. It handles things like
|
||||
* lookups, code validations, expansions, concept mappings, etc.
|
||||
* <p>
|
||||
* It is intended to only handle read operations, leaving various write operations to
|
||||
* other services within the terminology service APIs.
|
||||
* (Note that at present, a few write operations remain here- they should be moved but haven't
|
||||
* been moved yet)
|
||||
* </p>
|
||||
*/
|
||||
public interface ITermReadSvc {
|
||||
|
||||
ValueSet expandValueSet(ValueSet theValueSetToExpand);
|
||||
|
||||
|
@ -74,22 +87,7 @@ public interface IHapiTerminologySvc {
|
|||
|
||||
List<VersionIndependentConcept> findCodesBelowUsingBuiltInSystems(String theSystem, String theCode);
|
||||
|
||||
void saveDeferred();
|
||||
|
||||
/**
|
||||
* This is mostly for unit tests - we can disable processing of deferred concepts
|
||||
* by changing this flag
|
||||
*/
|
||||
void setProcessDeferred(boolean theProcessDeferred);
|
||||
|
||||
void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable);
|
||||
|
||||
/**
|
||||
* @return Returns the ID of the created/updated code system
|
||||
*/
|
||||
IIdType storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<org.hl7.fhir.r4.model.ValueSet> theValueSets, List<org.hl7.fhir.r4.model.ConceptMap> theConceptMaps);
|
||||
|
||||
void storeNewCodeSystemVersionIfNeeded(CodeSystem theCodeSystem, ResourceTable theResourceEntity);
|
||||
CodeSystem getCodeSystemFromContext(String theSystem);
|
||||
|
||||
void deleteConceptMapAndChildren(ResourceTable theResourceTable);
|
||||
|
||||
|
@ -107,10 +105,6 @@ public interface IHapiTerminologySvc {
|
|||
|
||||
IFhirResourceDaoCodeSystem.SubsumesResult subsumes(IPrimitiveType<String> theCodeA, IPrimitiveType<String> theCodeB, IPrimitiveType<String> theSystem, IBaseCoding theCodingA, IBaseCoding theCodingB);
|
||||
|
||||
AtomicInteger applyDeltaCodesystemsAdd(String theSystem, @Nullable String theParent, CodeSystem theValue);
|
||||
|
||||
AtomicInteger applyDeltaCodesystemsRemove(String theSystem, CodeSystem theDelta);
|
||||
|
||||
void preExpandDeferredValueSetsToTerminologyTables();
|
||||
|
||||
/**
|
||||
|
@ -124,4 +118,6 @@ public interface IHapiTerminologySvc {
|
|||
* Version independent
|
||||
*/
|
||||
boolean isValueSetPreExpandedForCodeValidation(IBaseResource theValueSet);
|
||||
|
||||
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
package ca.uhn.fhir.jpa.term.api;
|
||||
|
||||
/*
|
||||
* #%L
|
||||
|
@ -22,6 +22,6 @@ package ca.uhn.fhir.jpa.term;
|
|||
|
||||
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
|
||||
|
||||
public interface IHapiTerminologySvcDstu3 extends IHapiTerminologySvc, IValidationSupport {
|
||||
public interface ITermReadSvcDstu3 extends ITermReadSvc, IValidationSupport {
|
||||
// nothing
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
package ca.uhn.fhir.jpa.term.api;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -22,6 +22,6 @@ package ca.uhn.fhir.jpa.term;
|
|||
|
||||
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
|
||||
|
||||
public interface IHapiTerminologySvcR4 extends IHapiTerminologySvc, IValidationSupport {
|
||||
public interface ITermReadSvcR4 extends ITermReadSvc, IValidationSupport {
|
||||
// nothing
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
package ca.uhn.fhir.jpa.term.api;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
|
@ -22,6 +22,6 @@ package ca.uhn.fhir.jpa.term;
|
|||
|
||||
import org.hl7.fhir.r5.hapi.ctx.IValidationSupport;
|
||||
|
||||
public interface IHapiTerminologySvcR5 extends IHapiTerminologySvc, IValidationSupport {
|
||||
public interface ITermReadSvcR5 extends ITermReadSvc, IValidationSupport {
|
||||
// nothing
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
package ca.uhn.fhir.jpa.term.api;
|
||||
|
||||
/**
 * This service handles re-indexing of terminology concepts, i.e. rebuilding
 * derived concept data for concepts flagged as requiring re-indexing.
 */
public interface ITermReindexingSvc {

	/**
	 * Performs one re-indexing pass over concepts requiring re-indexing.
	 */
	void processReindexing();

}
|
|
@ -0,0 +1,21 @@
|
|||
package ca.uhn.fhir.jpa.term.api;
|
||||
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
||||
/**
 * This interface is used to handle differences in versions of FHIR for the terminology
 * server. It is really just an internal interface used by the
 * {@link ITermReadSvc terminology read service}.
 */
public interface ITermVersionAdapterSvc {

	/**
	 * Creates or updates the given (R4-model) CodeSystem in the server's native
	 * FHIR version.
	 *
	 * @return the ID of the created/updated resource
	 */
	IIdType createOrUpdateCodeSystem(CodeSystem theCodeSystemResource);

	/**
	 * Creates or updates the given (R4-model) ConceptMap in the server's native
	 * FHIR version.
	 */
	void createOrUpdateConceptMap(ConceptMap theNextConceptMap);

	/**
	 * Creates or updates the given (R4-model) ValueSet in the server's native
	 * FHIR version.
	 */
	void createOrUpdateValueSet(ValueSet theValueSet);

}
|
|
@ -20,10 +20,9 @@ package ca.uhn.fhir.jpa.term.custom;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -40,11 +39,9 @@ public class ConceptHandler implements IRecordHandler {
|
|||
public static final String CODE = "CODE";
|
||||
public static final String DISPLAY = "DISPLAY";
|
||||
private final Map<String, TermConcept> myCode2Concept;
|
||||
private final TermCodeSystemVersion myCodeSystemVersion;
|
||||
|
||||
public ConceptHandler(Map<String, TermConcept> theCode2concept, TermCodeSystemVersion theCodeSystemVersion) {
|
||||
public ConceptHandler(Map<String, TermConcept> theCode2concept) {
|
||||
myCode2Concept = theCode2concept;
|
||||
myCodeSystemVersion = theCodeSystemVersion;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -55,7 +52,7 @@ public class ConceptHandler implements IRecordHandler {
|
|||
|
||||
Validate.isTrue(!myCode2Concept.containsKey(code), "The code %s has appeared more than once", code);
|
||||
|
||||
TermConcept concept = TerminologyLoaderSvcImpl.getOrCreateConcept(myCodeSystemVersion, myCode2Concept, code);
|
||||
TermConcept concept = TermLoaderSvcImpl.getOrCreateConcept(myCode2Concept, code);
|
||||
concept.setCode(code);
|
||||
concept.setDisplay(display);
|
||||
|
||||
|
|
|
@ -0,0 +1,158 @@
|
|||
package ca.uhn.fhir.jpa.term.custom;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import ca.uhn.fhir.jpa.term.LoadedFileDescriptors;
|
||||
import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import com.google.common.collect.Multimaps;
|
||||
import org.apache.commons.csv.QuoteMode;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.*;
|
||||
|
||||
public class CustomTerminologySet {
|
||||
|
||||
private final int mySize;
|
||||
private final ListMultimap<TermConcept, String> myUnanchoredChildConceptsToParentCodes;
|
||||
private final List<TermConcept> myRootConcepts;
|
||||
|
||||
/**
|
||||
* Constructor for an empty object
|
||||
*/
|
||||
public CustomTerminologySet() {
|
||||
this(0, ArrayListMultimap.create(), new ArrayList<>());
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
private CustomTerminologySet(int theSize, ListMultimap<TermConcept, String> theUnanchoredChildConceptsToParentCodes, Collection<TermConcept> theRootConcepts) {
|
||||
this(theSize, theUnanchoredChildConceptsToParentCodes, new ArrayList<>(theRootConcepts));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
private CustomTerminologySet(int theSize, ListMultimap<TermConcept, String> theUnanchoredChildConceptsToParentCodes, List<TermConcept> theRootConcepts) {
|
||||
mySize = theSize;
|
||||
myUnanchoredChildConceptsToParentCodes = theUnanchoredChildConceptsToParentCodes;
|
||||
myRootConcepts = theRootConcepts;
|
||||
}
|
||||
|
||||
public void addRootConcept(String theCode) {
|
||||
addRootConcept(theCode, null);
|
||||
}
|
||||
|
||||
public TermConcept addRootConcept(String theCode, String theDisplay) {
|
||||
Validate.notBlank(theCode, "theCode must not be blank");
|
||||
Validate.isTrue(myRootConcepts.stream().noneMatch(t -> t.getCode().equals(theCode)), "Already have code %s", theCode);
|
||||
TermConcept retVal = new TermConcept();
|
||||
retVal.setCode(theCode);
|
||||
retVal.setDisplay(theDisplay);
|
||||
myRootConcepts.add(retVal);
|
||||
return retVal;
|
||||
}
|
||||
|
||||
|
||||
public ListMultimap<TermConcept, String> getUnanchoredChildConceptsToParentCodes() {
|
||||
return Multimaps.unmodifiableListMultimap(myUnanchoredChildConceptsToParentCodes);
|
||||
}
|
||||
|
||||
public int getSize() {
|
||||
return mySize;
|
||||
}
|
||||
|
||||
public TermCodeSystemVersion toCodeSystemVersion() {
|
||||
TermCodeSystemVersion csv = new TermCodeSystemVersion();
|
||||
|
||||
for (TermConcept next : myRootConcepts) {
|
||||
csv.getConcepts().add(next);
|
||||
}
|
||||
|
||||
populateVersionToChildCodes(csv, myRootConcepts);
|
||||
|
||||
return csv;
|
||||
}
|
||||
|
||||
private void populateVersionToChildCodes(TermCodeSystemVersion theCsv, List<TermConcept> theConcepts) {
|
||||
for (TermConcept next : theConcepts) {
|
||||
next.setCodeSystemVersion(theCsv);
|
||||
populateVersionToChildCodes(theCsv, next.getChildCodes());
|
||||
}
|
||||
}
|
||||
|
||||
public List<TermConcept> getRootConcepts() {
|
||||
return Collections.unmodifiableList(myRootConcepts);
|
||||
}
|
||||
|
||||
public void addUnanchoredChildConcept(String theParentCode, String theCode, String theDisplay) {
|
||||
Validate.notBlank(theParentCode);
|
||||
Validate.notBlank(theCode);
|
||||
|
||||
TermConcept code = new TermConcept()
|
||||
.setCode(theCode)
|
||||
.setDisplay(theDisplay);
|
||||
myUnanchoredChildConceptsToParentCodes.put(code, theParentCode);
|
||||
}
|
||||
|
||||
public void validateNoCycleOrThrowInvalidRequest() {
|
||||
Set<String> codes = new HashSet<>();
|
||||
validateNoCycleOrThrowInvalidRequest(codes, getRootConcepts());
|
||||
for (TermConcept next : myUnanchoredChildConceptsToParentCodes.keySet()) {
|
||||
validateNoCycleOrThrowInvalidRequest(codes, next);
|
||||
}
|
||||
}
|
||||
|
||||
private void validateNoCycleOrThrowInvalidRequest(Set<String> theCodes, List<TermConcept> theRootConcepts) {
|
||||
for (TermConcept next : theRootConcepts) {
|
||||
validateNoCycleOrThrowInvalidRequest(theCodes, next);
|
||||
}
|
||||
}
|
||||
|
||||
private void validateNoCycleOrThrowInvalidRequest(Set<String> theCodes, TermConcept next) {
|
||||
if (!theCodes.add(next.getCode())) {
|
||||
throw new InvalidRequestException("Cycle detected around code " + next.getCode());
|
||||
}
|
||||
validateNoCycleOrThrowInvalidRequest(theCodes, next.getChildCodes());
|
||||
}
|
||||
|
||||
|
||||
@Nonnull
|
||||
public static CustomTerminologySet load(LoadedFileDescriptors theDescriptors, boolean theFlat) {
|
||||
|
||||
final Map<String, TermConcept> code2concept = new LinkedHashMap<>();
|
||||
ArrayListMultimap<TermConcept, String> unanchoredChildConceptsToParentCodes = ArrayListMultimap.create();
|
||||
|
||||
// Concepts
|
||||
IRecordHandler conceptHandler = new ConceptHandler(code2concept);
|
||||
TermLoaderSvcImpl.iterateOverZipFile(theDescriptors, TermLoaderSvcImpl.CUSTOM_CONCEPTS_FILE, conceptHandler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
if (theFlat) {
|
||||
|
||||
return new CustomTerminologySet(code2concept.size(), ArrayListMultimap.create(), code2concept.values());
|
||||
|
||||
} else {
|
||||
|
||||
// Hierarchy
|
||||
if (theDescriptors.hasFile(TermLoaderSvcImpl.CUSTOM_HIERARCHY_FILE)) {
|
||||
IRecordHandler hierarchyHandler = new HierarchyHandler(code2concept, unanchoredChildConceptsToParentCodes);
|
||||
TermLoaderSvcImpl.iterateOverZipFile(theDescriptors, TermLoaderSvcImpl.CUSTOM_HIERARCHY_FILE, hierarchyHandler, ',', QuoteMode.NON_NUMERIC, false);
|
||||
}
|
||||
|
||||
// Find root concepts
|
||||
List<TermConcept> rootConcepts = new ArrayList<>();
|
||||
for (TermConcept nextConcept : code2concept.values()) {
|
||||
if (nextConcept.getParents().isEmpty()) {
|
||||
rootConcepts.add(nextConcept);
|
||||
}
|
||||
}
|
||||
|
||||
return new CustomTerminologySet(code2concept.size(), unanchoredChildConceptsToParentCodes, rootConcepts);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -24,9 +24,8 @@ import ca.uhn.fhir.jpa.entity.TermConcept;
|
|||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
|
@ -36,9 +35,11 @@ import static org.apache.commons.lang3.StringUtils.trim;
|
|||
public class HierarchyHandler implements IRecordHandler {
|
||||
|
||||
private final Map<String, TermConcept> myCode2Concept;
|
||||
private final ArrayListMultimap<TermConcept, String> myUnanchoredChildConceptsToParentCodes;
|
||||
|
||||
public HierarchyHandler(Map<String, TermConcept> theCode2concept) {
|
||||
public HierarchyHandler(Map<String, TermConcept> theCode2concept, ArrayListMultimap<TermConcept, String> theunanchoredChildConceptsToParentCodes) {
|
||||
myCode2Concept = theCode2concept;
|
||||
myUnanchoredChildConceptsToParentCodes = theunanchoredChildConceptsToParentCodes;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -47,12 +48,15 @@ public class HierarchyHandler implements IRecordHandler {
|
|||
String child = trim(theRecord.get("CHILD"));
|
||||
if (isNotBlank(parent) && isNotBlank(child)) {
|
||||
|
||||
TermConcept parentConcept = myCode2Concept.get(parent);
|
||||
ValidateUtil.isNotNullOrThrowUnprocessableEntity(parentConcept, "Parent code %s not found", parent);
|
||||
TermConcept childConcept = myCode2Concept.get(child);
|
||||
ValidateUtil.isNotNullOrThrowUnprocessableEntity(childConcept, "Child code %s not found", child);
|
||||
|
||||
parentConcept.addChild(childConcept, TermConceptParentChildLink.RelationshipTypeEnum.ISA);
|
||||
TermConcept parentConcept = myCode2Concept.get(parent);
|
||||
if (parentConcept == null) {
|
||||
myUnanchoredChildConceptsToParentCodes.put(childConcept, parent);
|
||||
} else {
|
||||
parentConcept.addChild(childConcept, TermConceptParentChildLink.RelationshipTypeEnum.ISA);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
|
@ -52,7 +52,7 @@ public class BaseLoincTop2000LabResultsHandler extends BaseLoincHandler implemen
|
|||
String displayName = trim(theRecord.get("Long Common Name"));
|
||||
|
||||
ValueSet valueSet = getValueSet(myValueSetId, myValueSetUri, myValueSetName, null);
|
||||
addCodeAsIncludeToValueSet(valueSet, IHapiTerminologyLoaderSvc.LOINC_URI, loincNumber, displayName);
|
||||
addCodeAsIncludeToValueSet(valueSet, ITermLoaderSvc.LOINC_URI, loincNumber, displayName);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
@ -96,7 +96,7 @@ public class LoincAnswerListHandler extends BaseLoincHandler {
|
|||
vs
|
||||
.getCompose()
|
||||
.getIncludeFirstRep()
|
||||
.setSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
|
||||
.setSystem(ITermLoaderSvc.LOINC_URI)
|
||||
.addConcept()
|
||||
.setCode(answerString)
|
||||
.setDisplay(displayText);
|
||||
|
|
|
@ -20,18 +20,14 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
|
@ -58,7 +58,7 @@ public class LoincDocumentOntologyHandler extends BaseLoincHandler implements IR
|
|||
|
||||
// RSNA Codes VS
|
||||
ValueSet vs = getValueSet(DOCUMENT_ONTOLOGY_CODES_VS_ID, DOCUMENT_ONTOLOGY_CODES_VS_URI, DOCUMENT_ONTOLOGY_CODES_VS_NAME, null);
|
||||
addCodeAsIncludeToValueSet(vs, IHapiTerminologyLoaderSvc.LOINC_URI, loincNumber, null);
|
||||
addCodeAsIncludeToValueSet(vs, ITermLoaderSvc.LOINC_URI, loincNumber, null);
|
||||
|
||||
// Part Properties
|
||||
String loincCodePropName;
|
||||
|
@ -84,7 +84,7 @@ public class LoincDocumentOntologyHandler extends BaseLoincHandler implements IR
|
|||
|
||||
TermConcept code = myCode2Concept.get(loincNumber);
|
||||
if (code != null) {
|
||||
code.addPropertyCoding(loincCodePropName, IHapiTerminologyLoaderSvc.LOINC_URI, partNumber, partName);
|
||||
code.addPropertyCoding(loincCodePropName, ITermLoaderSvc.LOINC_URI, partNumber, partName);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
|
@ -46,7 +46,7 @@ public class LoincGroupTermsFileHandler extends BaseLoincHandler implements IRec
|
|||
String loincNumber = trim(theRecord.get("LoincNumber"));
|
||||
|
||||
ValueSet valueSet = getValueSet(groupId, LoincGroupFileHandler.VS_URI_PREFIX + groupId, null, null);
|
||||
addCodeAsIncludeToValueSet(valueSet, IHapiTerminologyLoaderSvc.LOINC_URI, loincNumber, null);
|
||||
addCodeAsIncludeToValueSet(valueSet, ITermLoaderSvc.LOINC_URI, loincNumber, null);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -22,9 +22,9 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
|
@ -58,7 +58,7 @@ public class LoincHandler implements IRecordHandler {
|
|||
String longCommonName = trim(theRecord.get("LONG_COMMON_NAME"));
|
||||
String shortName = trim(theRecord.get("SHORTNAME"));
|
||||
String consumerName = trim(theRecord.get("CONSUMER_NAME"));
|
||||
String display = TerminologyLoaderSvcImpl.firstNonBlank(longCommonName, shortName, consumerName);
|
||||
String display = TermLoaderSvcImpl.firstNonBlank(longCommonName, shortName, consumerName);
|
||||
|
||||
TermConcept concept = new TermConcept(myCodeSystemVersion, code);
|
||||
concept.setDisplay(display);
|
||||
|
@ -117,7 +117,7 @@ public class LoincHandler implements IRecordHandler {
|
|||
}
|
||||
|
||||
if (isNotBlank(partNumber)) {
|
||||
concept.addPropertyCoding(nextPropertyName, IHapiTerminologyLoaderSvc.LOINC_URI, partNumber, nextPropertyValue);
|
||||
concept.addPropertyCoding(nextPropertyName, ITermLoaderSvc.LOINC_URI, partNumber, nextPropertyValue);
|
||||
} else {
|
||||
String msg = "Unable to find part code with TYPE[" + key.getPartType() + "] and NAME[" + nextPropertyValue + "] (using name " + propertyValue + ")";
|
||||
ourLog.warn(msg);
|
||||
|
|
|
@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
|
||||
|
@ -56,13 +56,13 @@ public class LoincHierarchyHandler implements IRecordHandler {
|
|||
|
||||
parent.addPropertyCoding(
|
||||
"child",
|
||||
IHapiTerminologyLoaderSvc.LOINC_URI,
|
||||
ITermLoaderSvc.LOINC_URI,
|
||||
child.getCode(),
|
||||
child.getDisplay());
|
||||
|
||||
child.addPropertyCoding(
|
||||
"parent",
|
||||
IHapiTerminologyLoaderSvc.LOINC_URI,
|
||||
ITermLoaderSvc.LOINC_URI,
|
||||
parent.getCode(),
|
||||
parent.getDisplay());
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
|
@ -57,8 +57,8 @@ public class LoincIeeeMedicalDeviceCodeHandler extends BaseLoincHandler implemen
|
|||
String ieeeDisplayName = trim(theRecord.get("IEEE_REFID"));
|
||||
|
||||
// LOINC Part -> IEEE 11073:10101 Mappings
|
||||
String sourceCodeSystemUri = IHapiTerminologyLoaderSvc.LOINC_URI;
|
||||
String targetCodeSystemUri = IHapiTerminologyLoaderSvc.IEEE_11073_10101_URI;
|
||||
String sourceCodeSystemUri = ITermLoaderSvc.LOINC_URI;
|
||||
String targetCodeSystemUri = ITermLoaderSvc.IEEE_11073_10101_URI;
|
||||
addConceptMapEntry(
|
||||
new ConceptMapping()
|
||||
.setConceptMapId(LOINC_IEEE_CM_ID)
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
|
@ -49,7 +49,7 @@ public class LoincImagingDocumentCodeHandler extends BaseLoincHandler implements
|
|||
String displayName = trim(theRecord.get("LONG_COMMON_NAME"));
|
||||
|
||||
ValueSet valueSet = getValueSet(VS_ID, VS_URI, VS_NAME,null);
|
||||
addCodeAsIncludeToValueSet(valueSet, IHapiTerminologyLoaderSvc.LOINC_URI, loincNumber, displayName);
|
||||
addCodeAsIncludeToValueSet(valueSet, ITermLoaderSvc.LOINC_URI, loincNumber, displayName);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
|
@ -101,7 +101,7 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme
|
|||
String loincPartMapUri;
|
||||
String loincPartMapName;
|
||||
switch (extCodeSystem) {
|
||||
case IHapiTerminologyLoaderSvc.SCT_URI:
|
||||
case ITermLoaderSvc.SCT_URI:
|
||||
loincPartMapId = LOINC_SCT_PART_MAP_ID;
|
||||
loincPartMapUri = LOINC_SCT_PART_MAP_URI;
|
||||
loincPartMapName = LOINC_SCT_PART_MAP_NAME;
|
||||
|
@ -133,7 +133,7 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme
|
|||
.setConceptMapId(loincPartMapId)
|
||||
.setConceptMapUri(loincPartMapUri)
|
||||
.setConceptMapName(loincPartMapName)
|
||||
.setSourceCodeSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
|
||||
.setSourceCodeSystem(ITermLoaderSvc.LOINC_URI)
|
||||
.setSourceCode(partNumber)
|
||||
.setSourceDisplay(partName)
|
||||
.setTargetCodeSystem(extCodeSystem)
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
|
@ -98,7 +98,7 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecor
|
|||
vs
|
||||
.getCompose()
|
||||
.getIncludeFirstRep()
|
||||
.setSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
|
||||
.setSystem(ITermLoaderSvc.LOINC_URI)
|
||||
.addConcept()
|
||||
.setCode(loincNumber)
|
||||
.setDisplay(longCommonName);
|
||||
|
@ -167,7 +167,7 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecor
|
|||
|
||||
TermConcept code = myCode2Concept.get(loincNumber);
|
||||
if (code != null) {
|
||||
code.addPropertyCoding(loincCodePropName, IHapiTerminologyLoaderSvc.LOINC_URI, partNumber, partName);
|
||||
code.addPropertyCoding(loincCodePropName, ITermLoaderSvc.LOINC_URI, partNumber, partName);
|
||||
}
|
||||
|
||||
// LOINC Part -> Radlex RID code mappings
|
||||
|
@ -177,7 +177,7 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecor
|
|||
.setConceptMapId(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_ID)
|
||||
.setConceptMapUri(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_URI)
|
||||
.setConceptMapName(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_NAME)
|
||||
.setSourceCodeSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
|
||||
.setSourceCodeSystem(ITermLoaderSvc.LOINC_URI)
|
||||
.setSourceCode(partNumber)
|
||||
.setSourceDisplay(partName)
|
||||
.setTargetCodeSystem(RID_CS_URI)
|
||||
|
@ -194,7 +194,7 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecor
|
|||
.setConceptMapId(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_ID)
|
||||
.setConceptMapUri(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_URI)
|
||||
.setConceptMapName(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_NAME)
|
||||
.setSourceCodeSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
|
||||
.setSourceCodeSystem(ITermLoaderSvc.LOINC_URI)
|
||||
.setSourceCode(loincNumber)
|
||||
.setSourceDisplay(longCommonName)
|
||||
.setTargetCodeSystem(RPID_CS_URI)
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.term.loinc;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
|
@ -48,7 +48,7 @@ public class LoincUniversalOrderSetHandler extends BaseLoincHandler implements I
|
|||
String orderObs = trim(theRecord.get("ORDER_OBS"));
|
||||
|
||||
ValueSet valueSet = getValueSet(VS_ID, VS_URI, VS_NAME, null);
|
||||
addCodeAsIncludeToValueSet(valueSet, IHapiTerminologyLoaderSvc.LOINC_URI, loincNumber, displayName);
|
||||
addCodeAsIncludeToValueSet(valueSet, ITermLoaderSvc.LOINC_URI, loincNumber, displayName);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.term.snomedct;
|
|||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.term.IRecordHandler;
|
||||
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
|
||||
import org.apache.commons.csv.CSVRecord;
|
||||
|
||||
import java.util.Map;
|
||||
|
@ -56,9 +56,10 @@ public final class SctHandlerDescription implements IRecordHandler {
|
|||
|
||||
String term = theRecord.get("term");
|
||||
|
||||
TermConcept concept = TerminologyLoaderSvcImpl.getOrCreateConcept(myCodeSystemVersion, myId2concept, id);
|
||||
TermConcept concept = TermLoaderSvcImpl.getOrCreateConcept(myId2concept, id);
|
||||
concept.setCode(conceptId);
|
||||
concept.setDisplay(term);
|
||||
concept.setCodeSystemVersion(myCodeSystemVersion);
|
||||
myCode2concept.put(conceptId, concept);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.validation;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvcDstu3;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvcDstu3;
|
||||
import org.hl7.fhir.dstu3.hapi.ctx.DefaultProfileValidationSupport;
|
||||
import org.hl7.fhir.dstu3.hapi.validation.ValidationSupportChain;
|
||||
import org.hl7.fhir.dstu3.hapi.validation.SnapshotGeneratingValidationSupport;
|
||||
|
@ -41,7 +41,7 @@ public class JpaValidationSupportChainDstu3 extends ValidationSupportChain {
|
|||
@Autowired
|
||||
private DefaultProfileValidationSupport myDefaultProfileValidationSupport;
|
||||
@Autowired
|
||||
private IHapiTerminologySvcDstu3 myTerminologyService;
|
||||
private ITermReadSvcDstu3 myTerminologyService;
|
||||
@Autowired
|
||||
private FhirContext myFhirContext;
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.validation;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvcR4;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvcR4;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.hapi.ctx.DefaultProfileValidationSupport;
|
||||
import org.hl7.fhir.r4.hapi.validation.SnapshotGeneratingValidationSupport;
|
||||
|
@ -46,7 +46,7 @@ public class JpaValidationSupportChainR4 extends ValidationSupportChain {
|
|||
public ca.uhn.fhir.jpa.dao.r4.IJpaValidationSupportR4 myJpaValidationSupportR4;
|
||||
|
||||
@Autowired
|
||||
private IHapiTerminologySvcR4 myTerminologyService;
|
||||
private ITermReadSvcR4 myTerminologyService;
|
||||
|
||||
public JpaValidationSupportChainR4() {
|
||||
super();
|
||||
|
|
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.validation;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvcR5;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvcR5;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r5.hapi.ctx.DefaultProfileValidationSupport;
|
||||
import org.hl7.fhir.r5.hapi.validation.SnapshotGeneratingValidationSupport;
|
||||
|
@ -46,7 +46,7 @@ public class JpaValidationSupportChainR5 extends ValidationSupportChain {
|
|||
public ca.uhn.fhir.jpa.dao.r5.IJpaValidationSupportR5 myJpaValidationSupportR5;
|
||||
|
||||
@Autowired
|
||||
private IHapiTerminologySvcR5 myTerminologyService;
|
||||
private ITermReadSvcR5 myTerminologyService;
|
||||
|
||||
public JpaValidationSupportChainR5() {
|
||||
super();
|
||||
|
|
|
@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
|
|||
import ca.uhn.fhir.interceptor.api.IInterceptorService;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.interceptor.executor.InterceptorService;
|
||||
import ca.uhn.fhir.jpa.BaseTest;
|
||||
import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
|
@ -66,7 +67,7 @@ import static org.junit.Assert.fail;
|
|||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public abstract class BaseJpaTest {
|
||||
public abstract class BaseJpaTest extends BaseTest {
|
||||
|
||||
protected static final String CM_URL = "http://example.com/my_concept_map";
|
||||
protected static final String CS_URL = "http://example.com/my_code_system";
|
||||
|
|
|
@ -921,7 +921,6 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
|
|||
Patient patient = new Patient();
|
||||
patient.addIdentifier().setSystem("urn:system").setValue("001");
|
||||
patient.addName().addFamily("testSearchNameParam01Fam").addGiven("testSearchNameParam01Giv");
|
||||
ResourceMetadataKeyEnum.TITLE.put(patient, "P1TITLE");
|
||||
id1 = myPatientDao.create(patient, mySrd).getId();
|
||||
}
|
||||
{
|
||||
|
|
|
@ -7,7 +7,6 @@ import ca.uhn.fhir.jpa.dao.*;
|
|||
import ca.uhn.fhir.jpa.dao.data.*;
|
||||
import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;
|
||||
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
|
@ -18,8 +17,11 @@ import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
|
|||
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
|
||||
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.jpa.util.ResourceCountCache;
|
||||
import ca.uhn.fhir.jpa.util.ResourceProviderFactory;
|
||||
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
|
||||
|
@ -67,6 +69,8 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
|
|||
private static JpaValidationSupportChainDstu3 ourJpaValidationSupportChainDstu3;
|
||||
private static IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> ourValueSetDao;
|
||||
|
||||
@Autowired
|
||||
protected ITermDeferredStorageSvc myTerminologyDeferredStorageSvc;
|
||||
@Autowired
|
||||
@Qualifier("myResourceCountsCache")
|
||||
protected ResourceCountCache myResourceCountsCache;
|
||||
|
@ -243,7 +247,7 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
|
|||
@Autowired
|
||||
protected ITermCodeSystemDao myTermCodeSystemDao;
|
||||
@Autowired
|
||||
protected IHapiTerminologySvc myTermSvc;
|
||||
protected ITermReadSvc myTermSvc;
|
||||
@Autowired
|
||||
protected PlatformTransactionManager myTransactionMgr;
|
||||
@Autowired
|
||||
|
@ -262,6 +266,8 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
|
|||
private JpaValidationSupportChainDstu3 myJpaValidationSupportChainDstu3;
|
||||
@Autowired
|
||||
private IBulkDataExportSvc myBulkDataExportSvc;
|
||||
@Autowired
|
||||
protected ITermCodeSystemStorageSvc myTermCodeSystemStorageSvc;
|
||||
|
||||
@After()
|
||||
public void afterCleanupDao() {
|
||||
|
@ -278,12 +284,13 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
|
|||
|
||||
@After
|
||||
public void afterClearTerminologyCaches() {
|
||||
BaseHapiTerminologySvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc);
|
||||
BaseTermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc);
|
||||
baseHapiTerminologySvc.clearTranslationCache();
|
||||
baseHapiTerminologySvc.clearTranslationWithReverseCache();
|
||||
baseHapiTerminologySvc.clearDeferred();
|
||||
BaseHapiTerminologySvcImpl.clearOurLastResultsFromTranslationCache();
|
||||
BaseHapiTerminologySvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
|
||||
BaseTermReadSvcImpl.clearOurLastResultsFromTranslationCache();
|
||||
BaseTermReadSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
|
||||
TermDeferredStorageSvcImpl deferredSvc = AopTestUtils.getTargetObject(myTerminologyDeferredStorageSvc);
|
||||
deferredSvc.clearDeferred();
|
||||
}
|
||||
|
||||
@After()
|
||||
|
|
|
@ -1,34 +1,30 @@
|
|||
package ca.uhn.fhir.jpa.dao.dstu3;
|
||||
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.hl7.fhir.dstu3.model.CodeSystem;
|
||||
import org.hl7.fhir.dstu3.model.Enumerations;
|
||||
import org.hl7.fhir.dstu3.model.IdType;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotEquals;
|
||||
|
||||
public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3CodeSystemTest.class);
|
||||
|
||||
@AfterClass
|
||||
public static void afterClassClearContext() {
|
||||
TestUtil.clearAllStaticFieldsForUnitTest();
|
||||
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testIndexContained() throws Exception {
|
||||
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
|
||||
String input = IOUtils.toString(getClass().getResource("/dstu3_codesystem_complete.json"), StandardCharsets.UTF_8);
|
||||
CodeSystem cs = myFhirCtx.newJsonParser().parseResource(CodeSystem.class, input);
|
||||
|
@ -39,13 +35,13 @@ public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
|
|||
int outcome= myResourceReindexingSvc.forceReindexingPass();
|
||||
assertNotEquals(-1, outcome); // -1 means there was a failure
|
||||
|
||||
myTermSvc.saveDeferred();
|
||||
myTerminologyDeferredStorageSvc.saveDeferred();
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteCodeSystemComplete() {
|
||||
BaseHapiTerminologySvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
|
||||
// Create the code system
|
||||
CodeSystem cs = new CodeSystem();
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue