("PREVIOUS_ID") {
- private static final long serialVersionUID = 1L;
-
- @Override
- public IdDt get(IResource theResource) {
- return getIdFromMetadataOrNullIfNone(theResource.getResourceMetadata(), PREVIOUS_ID);
- }
-
- @Override
- public void put(IResource theResource, IdDt theObject) {
- theResource.getResourceMetadata().put(PREVIOUS_ID, theObject);
- }
- };
/**
* The value for this key represents a {@link List} of profile IDs that this resource claims to conform to.
*
@@ -301,18 +217,8 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
if (obj == null) {
return null;
}
- try {
- @SuppressWarnings("unchecked")
- List<BaseCodingDt> securityLabels = (List<BaseCodingDt>) obj;
- if (securityLabels.isEmpty()) {
- return null;
- }
- return securityLabels;
- } catch (ClassCastException e) {
- throw new InternalErrorException("Found an object of type '" + obj.getClass().getCanonicalName() + "' in resource metadata for key SECURITY_LABELS - Expected "
- + BaseCodingDt.class.getCanonicalName());
- }
-
+ //noinspection unchecked
+ return (List<BaseCodingDt>) obj;
}
@Override
@@ -337,14 +243,9 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
Object retValObj = theResource.getResourceMetadata().get(TAG_LIST);
if (retValObj == null) {
return null;
- } else if (retValObj instanceof TagList) {
- if (((TagList) retValObj).isEmpty()) {
- return null;
- }
+ } else {
return (TagList) retValObj;
}
- throw new InternalErrorException("Found an object of type '" + retValObj.getClass().getCanonicalName() + "' in resource metadata for key " + TAG_LIST.name() + " - Expected "
- + TagList.class.getCanonicalName());
}
@Override
@@ -352,25 +253,6 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
theResource.getResourceMetadata().put(TAG_LIST, theObject);
}
};
- /**
- * If present and populated with a string (as an instance of {@link String}), this value contains the title for this resource, as supplied in any bundles containing the resource.
- *
- * Values for this key are of type {@link String}
- *
- */
- public static final ResourceMetadataKeyEnum<String> TITLE = new ResourceMetadataKeyEnum<String>("TITLE") {
- private static final long serialVersionUID = 1L;
-
- @Override
- public String get(IResource theResource) {
- return getStringFromMetadataOrNullIfNone(theResource.getResourceMetadata(), TITLE);
- }
-
- @Override
- public void put(IResource theResource, String theObject) {
- theResource.getResourceMetadata().put(TITLE, theObject);
- }
- };
/**
* The value for this key is the bundle entry Updated time. This is defined by FHIR as "Last Updated for resource". This value is also used for populating the "Last-Modified" header in the
* case of methods that return a single resource (read, vread, etc.)
@@ -398,7 +280,10 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
*
* Values for this key are of type {@link String}
*
+ *
+ * @deprecated The {@link IResource#getId()} resource ID will now be populated with the version ID via the {@link IdDt#getVersionIdPart()} method
*/
+ @Deprecated
public static final ResourceMetadataKeyEnum<String> VERSION = new ResourceMetadataKeyEnum<String>("VERSION") {
private static final long serialVersionUID = 1L;
@@ -426,7 +311,7 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
@Override
public IdDt get(IResource theResource) {
- return getIdFromMetadataOrNullIfNone(theResource.getResourceMetadata(), VERSION_ID);
+ return getIdFromMetadataOrNullIfNone(theResource.getResourceMetadata());
}
@Override
@@ -474,32 +359,45 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
public abstract void put(IResource theResource, T theObject);
- @Override
- public String toString() {
- return myValue;
+ public static abstract class ResourceMetadataKeySupportingAnyResource<T, T2> extends ResourceMetadataKeyEnum<T> {
+
+ private static final long serialVersionUID = 1L;
+
+ public ResourceMetadataKeySupportingAnyResource(String theValue) {
+ super(theValue);
+ }
+
+ public abstract T2 get(IAnyResource theResource);
+
+ public abstract void put(IAnyResource theResource, T2 theObject);
+
}
- private static DecimalDt getDecimalFromMetadataOrNullIfNone(Map<ResourceMetadataKeyEnum<?>, Object> theResourceMetadata, ResourceMetadataKeyEnum<DecimalDt> theKey) {
- Object retValObj = theResourceMetadata.get(theKey);
- if (retValObj == null) {
- return null;
- } else if (retValObj instanceof DecimalDt) {
- if (((DecimalDt) retValObj).isEmpty()) {
- return null;
- }
- return (DecimalDt) retValObj;
- } else if (retValObj instanceof String) {
- if (StringUtils.isBlank((String) retValObj)) {
- return null;
- }
- return new DecimalDt((String) retValObj);
- } else if (retValObj instanceof Double) {
- return new DecimalDt((Double) retValObj);
+ public static final class ExtensionResourceMetadataKey extends ResourceMetadataKeyEnum<ExtensionDt> {
+ public ExtensionResourceMetadataKey(String theUrl) {
+ super(theUrl);
+ }
+
+ @Override
+ public ExtensionDt get(IResource theResource) {
+ Object retValObj = theResource.getResourceMetadata().get(this);
+ if (retValObj == null) {
+ return null;
+ } else if (retValObj instanceof ExtensionDt) {
+ return (ExtensionDt) retValObj;
+ }
+ throw new InternalErrorException("Found an object of type '" + retValObj.getClass().getCanonicalName()
+ + "' in resource metadata for key " + this.name() + " - Expected "
+ + ExtensionDt.class.getCanonicalName());
+ }
+
+ @Override
+ public void put(IResource theResource, ExtensionDt theObject) {
+ theResource.getResourceMetadata().put(this, theObject);
}
- throw new InternalErrorException("Found an object of type '" + retValObj.getClass().getCanonicalName() + "' in resource metadata for key " + theKey.name() + " - Expected "
- + InstantDt.class.getCanonicalName());
}
+
@SuppressWarnings("unchecked")
private static <T extends Enum<?>> T getEnumFromMetadataOrNullIfNone(Map<ResourceMetadataKeyEnum<?>, Object> theResourceMetadata, ResourceMetadataKeyEnum<T> theKey, Class<T> theEnumType,
IValueSetEnumBinder<T> theBinder) {
@@ -515,8 +413,8 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
+ InstantDt.class.getCanonicalName());
}
- private static IdDt getIdFromMetadataOrNullIfNone(Map<ResourceMetadataKeyEnum<?>, Object> theResourceMetadata, ResourceMetadataKeyEnum<?> theKey) {
- return toId(theKey, theResourceMetadata.get(theKey));
+ private static IdDt getIdFromMetadataOrNullIfNone(Map<ResourceMetadataKeyEnum<?>, Object> theResourceMetadata) {
+ return toId(ResourceMetadataKeyEnum.VERSION_ID, theResourceMetadata.get(ResourceMetadataKeyEnum.VERSION_ID));
}
private static List<IdDt> getIdListFromMetadataOrNullIfNone(Map<ResourceMetadataKeyEnum<?>, Object> theResourceMetadata, ResourceMetadataKeyEnum<?> theKey) {
@@ -586,49 +484,11 @@ public abstract class ResourceMetadataKeyEnum<T> implements Serializable {
}
return (IdDt) retValObj;
} else if (retValObj instanceof Number) {
- return new IdDt(((Number) retValObj).toString());
+ return new IdDt(retValObj.toString());
}
throw new InternalErrorException("Found an object of type '" + retValObj.getClass().getCanonicalName() + "' in resource metadata for key " + theKey.name() + " - Expected "
+ IdDt.class.getCanonicalName());
}
- public static abstract class ResourceMetadataKeySupportingAnyResource<T, T2> extends ResourceMetadataKeyEnum<T> {
-
- private static final long serialVersionUID = 1L;
-
- public ResourceMetadataKeySupportingAnyResource(String theValue) {
- super(theValue);
- }
-
- public abstract T2 get(IAnyResource theResource);
-
- public abstract void put(IAnyResource theResource, T2 theObject);
-
- }
-
- public static final class ExtensionResourceMetadataKey extends ResourceMetadataKeyEnum<ExtensionDt> {
- public ExtensionResourceMetadataKey(String url) {
- super(url);
- }
-
- @Override
- public ExtensionDt get(IResource theResource) {
- Object retValObj = theResource.getResourceMetadata().get(this);
- if (retValObj == null) {
- return null;
- } else if (retValObj instanceof ExtensionDt) {
- return (ExtensionDt) retValObj;
- }
- throw new InternalErrorException("Found an object of type '" + retValObj.getClass().getCanonicalName()
- + "' in resource metadata for key " + this.name() + " - Expected "
- + ExtensionDt.class.getCanonicalName());
- }
-
- @Override
- public void put(IResource theResource, ExtensionDt theObject) {
- theResource.getResourceMetadata().put(this, theObject);
- }
- }
-
}
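Reviewer note: a minimal usage sketch for the relocated ExtensionResourceMetadataKey (not part of the patch). The Patient instance, extension URL and StringDt value are illustrative placeholders; any IResource works the same way.

    ResourceMetadataKeyEnum.ExtensionResourceMetadataKey key =
        new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://example.org/ext/source-system");

    ExtensionDt extension = new ExtensionDt();
    extension.setUrl("http://example.org/ext/source-system");
    extension.setValue(new StringDt("legacy-ehr"));

    key.put(patient, extension);            // stored under the key in getResourceMetadata()
    ExtensionDt found = key.get(patient);   // null if absent; InternalErrorException if the stored type is wrong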
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Tag.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Tag.java
index 7ff4dc1d705..36aa527a369 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Tag.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Tag.java
@@ -168,18 +168,6 @@ public class Tag extends BaseElement implements IElement, IBaseCoding {
return this;
}
- public String toHeaderValue() {
- StringBuilder b = new StringBuilder();
- b.append(this.getTerm());
- if (isNotBlank(this.getLabel())) {
- b.append("; label=\"").append(this.getLabel()).append('"');
- }
- if (isNotBlank(this.getScheme())) {
- b.append("; scheme=\"").append(this.getScheme()).append('"');
- }
- return b.toString();
- }
-
@Override
public String toString() {
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpResponse.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpResponse.java
index e2ea2f99d8e..161e6ba9db1 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpResponse.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpResponse.java
@@ -33,12 +33,6 @@ import java.util.Map;
*/
public interface IHttpResponse {
- /**
- * @deprecated This method was deprecated in HAPI FHIR 2.2 because its name has a typo. Use {@link #bufferEntity()} instead.
- */
- @Deprecated
- void bufferEntitity() throws IOException;
-
/**
* Buffer the message entity data.
*
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java
index 31a9875def5..490406c424c 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenParam.java
@@ -248,26 +248,30 @@ public class TokenParam extends BaseParam /*implements IQueryParameterType*/ {
@Override
public boolean equals(Object theO) {
- if (this == theO) return true;
+ if (this == theO) {
+ return true;
+ }
- if (theO == null || getClass() != theO.getClass()) return false;
+ if (theO == null || getClass() != theO.getClass()) {
+ return false;
+ }
TokenParam that = (TokenParam) theO;
- return new EqualsBuilder()
- .append(myModifier, that.myModifier)
- .append(mySystem, that.mySystem)
- .append(myValue, that.myValue)
- .isEquals();
+ EqualsBuilder b = new EqualsBuilder();
+ b.append(myModifier, that.myModifier);
+ b.append(mySystem, that.mySystem);
+ b.append(myValue, that.myValue);
+ return b.isEquals();
}
@Override
public int hashCode() {
- return new HashCodeBuilder(17, 37)
- .append(myModifier)
- .append(mySystem)
- .append(myValue)
- .toHashCode();
+ HashCodeBuilder b = new HashCodeBuilder(17, 37);
+ b.append(myModifier);
+ b.append(mySystem);
+ b.append(myValue);
+ return b.toHashCode();
}
private static String toSystemValue(UriDt theSystem) {
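Reviewer note: unrolling the EqualsBuilder/HashCodeBuilder chains does not change behaviour; equality is still driven by modifier, system and value. A quick sketch of the contract (mirrors the expanded TokenParamTest below):

    TokenParam a = new TokenParam("http://loinc.org", "1234-5");
    TokenParam b = new TokenParam("http://loinc.org", "1234-5");
    // same system/value (and no modifier) => equal, with matching hash codes
    assert a.equals(b) && a.hashCode() == b.hashCode();
    assert !a.equals(new TokenParam("http://loinc.org", "9999-9"));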
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AsyncUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AsyncUtil.java
new file mode 100644
index 00000000000..a9da8d04d26
--- /dev/null
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AsyncUtil.java
@@ -0,0 +1,54 @@
+package ca.uhn.fhir.util;
+
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+public class AsyncUtil {
+ private static final Logger ourLog = LoggerFactory.getLogger(AsyncUtil.class);
+
+ /**
+ * Non instantiable
+ */
+ private AsyncUtil() {
+ }
+
+ /**
+ * Calls Thread.sleep and if an InterruptedException occurs, logs a warning but otherwise continues
+ *
+ * @param theMillis The number of millis to sleep
+ * @return Did we sleep the whole amount
+ */
+ public static boolean sleep(long theMillis) {
+ try {
+ Thread.sleep(theMillis);
+ return true;
+ } catch (InterruptedException theE) {
+ Thread.currentThread().interrupt();
+ ourLog.warn("Sleep for {}ms was interrupted", theMillis);
+ return false;
+ }
+ }
+
+ public static boolean awaitLatchAndThrowInternalErrorExceptionOnInterrupt(CountDownLatch theInitialCollectionLatch, long theTime, TimeUnit theTimeUnit) {
+ try {
+ return theInitialCollectionLatch.await(theTime, theTimeUnit);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new InternalErrorException(e);
+ }
+ }
+
+ public static boolean awaitLatchAndIgnoreInterrupt(CountDownLatch theInitialCollectionLatch, long theTime, TimeUnit theTimeUnit) {
+ try {
+ return theInitialCollectionLatch.await(theTime, theTimeUnit);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ ourLog.warn("Interrupted while waiting for latch");
+ return false;
+ }
+ }
+}
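Reviewer note: the new AsyncUtil helpers centralize the interrupt-handling boilerplate (restore the interrupt flag, then either swallow or translate the interrupt). A minimal sketch of the intended call sites; the latch and timeouts are illustrative:

    CountDownLatch workDone = new CountDownLatch(1);

    // Polling loop: back off briefly, bail out if the thread was interrupted.
    if (!AsyncUtil.sleep(500)) {
        return;
    }

    // Wait for a background task, turning an interrupt into an InternalErrorException.
    boolean completed = AsyncUtil.awaitLatchAndThrowInternalErrorExceptionOnInterrupt(workDone, 30, TimeUnit.SECONDS);

    // Or treat an interrupt like a timeout and just report failure.
    boolean finished = AsyncUtil.awaitLatchAndIgnoreInterrupt(workDone, 30, TimeUnit.SECONDS);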
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AttachmentUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AttachmentUtil.java
index 8baefd91050..db516ec2a72 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AttachmentUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AttachmentUtil.java
@@ -20,9 +20,8 @@ package ca.uhn.fhir.util;
* #L%
*/
-import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
-import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
-import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.*;
+import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.ICompositeType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
@@ -35,12 +34,10 @@ public class AttachmentUtil {
* Fetches the base64Binary value of Attachment.data, creating it if it does not
* already exist.
*/
- @SuppressWarnings("unchecked")
public static IPrimitiveType getOrCreateData(FhirContext theContext, ICompositeType theAttachment) {
return getOrCreateChild(theContext, theAttachment, "data", "base64Binary");
}
- @SuppressWarnings("unchecked")
public static IPrimitiveType getOrCreateContentType(FhirContext theContext, ICompositeType theAttachment) {
return getOrCreateChild(theContext, theAttachment, "contentType", "string");
}
@@ -64,6 +61,16 @@ public class AttachmentUtil {
});
}
+ public static void setUrl(FhirContext theContext, ICompositeType theAttachment, String theUrl) {
+ BaseRuntimeChildDefinition entryChild = getChild(theContext, theAttachment, "url");
+ assert entryChild != null : "Version " + theContext + " has no child " + "url";
+ String typeName = "uri";
+ if (theContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
+ typeName = "url";
+ }
+ entryChild.getMutator().setValue(theAttachment, newPrimitive(theContext, typeName, theUrl));
+ }
+
public static void setContentType(FhirContext theContext, ICompositeType theAttachment, String theContentType) {
BaseRuntimeChildDefinition entryChild = getChild(theContext, theAttachment, "contentType");
entryChild.getMutator().setValue(theAttachment, newPrimitive(theContext, "code", theContentType));
@@ -88,7 +95,9 @@ public class AttachmentUtil {
*/
@SuppressWarnings("unchecked")
static <T> IPrimitiveType<T> newPrimitive(FhirContext theContext, String theType, T theValue) {
- IPrimitiveType<T> primitive = (IPrimitiveType<T>) theContext.getElementDefinition(theType).newInstance();
+ BaseRuntimeElementDefinition<?> elementDefinition = theContext.getElementDefinition(theType);
+ Validate.notNull(elementDefinition, "Unknown type %s for %s", theType, theContext);
+ IPrimitiveType<T> primitive = (IPrimitiveType<T>) elementDefinition.newInstance();
primitive.setValue(theValue);
return primitive;
}
@@ -100,4 +109,8 @@ public class AttachmentUtil {
BaseRuntimeElementCompositeDefinition<?> def = (BaseRuntimeElementCompositeDefinition<?>) theContext.getElementDefinition(theElement.getClass());
return def.getChildByName(theName);
}
+
+ public static ICompositeType newInstance(FhirContext theFhirCtx) {
+ return (ICompositeType) theFhirCtx.getElementDefinition("Attachment").newInstance();
+ }
}
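Reviewer note: setUrl picks the element type by FHIR version because Attachment.url is a uri in DSTU3 and earlier but a dedicated url type from R4 onward. A hedged sketch of how the CLI change further down combines the new helpers (the byte array and file name are placeholders):

    FhirContext ctx = FhirContext.forR4();
    ICompositeType attachment = AttachmentUtil.newInstance(ctx);
    AttachmentUtil.setUrl(ctx, attachment, "file:/files.zip");
    AttachmentUtil.setContentType(ctx, attachment, "application/zip");
    AttachmentUtil.setData(ctx, attachment, zipBytes); // bytes produced elsewhere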
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java
index c57a9f65b22..4ec185a8fd9 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java
@@ -72,6 +72,8 @@ public class ParametersUtil {
}
private static void addClientParameter(FhirContext theContext, Object theValue, IBaseResource theTargetResource, BaseRuntimeChildDefinition paramChild, BaseRuntimeElementCompositeDefinition<?> paramChildElem, String theName) {
+ Validate.notNull(theValue, "theValue must not be null");
+
if (theValue instanceof IBaseResource) {
IBase parameter = createParameterRepetition(theContext, theTargetResource, paramChild, paramChildElem, theName);
paramChildElem.getChildByName("resource").getMutator().addValue(parameter, (IBaseResource) theValue);
@@ -162,7 +164,6 @@ public class ParametersUtil {
IPrimitiveType value = (IPrimitiveType) theCtx.getElementDefinition("boolean").newInstance();
value.setValue(theValue);
addParameterToParameters(theCtx, theParameters, theName, value);
-
}
@SuppressWarnings("unchecked")
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java
index a3fc0c475d5..53ca7e3efb3 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java
@@ -1,6 +1,7 @@
package ca.uhn.fhir.util;
import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import java.text.DecimalFormat;
@@ -9,8 +10,6 @@ import java.util.Date;
import java.util.LinkedList;
import java.util.concurrent.TimeUnit;
-import static org.apache.commons.lang3.StringUtils.isNotBlank;
-
/*
* #%L
* HAPI FHIR - Core Library
@@ -48,12 +47,14 @@ public class StopWatch {
private long myStarted = now();
private TaskTiming myCurrentTask;
private LinkedList<TaskTiming> myTasks;
+
/**
* Constructor
*/
public StopWatch() {
super();
}
+
/**
* Constructor
*
@@ -63,7 +64,13 @@ public class StopWatch {
myStarted = theStart.getTime();
}
- public StopWatch(long theL) {
+ /**
+ * Constructor
+ *
+ * @param theStart The time that the stopwatch was started
+ */
+ public StopWatch(long theStart) {
+ myStarted = theStart;
}
private void addNewlineIfContentExists(StringBuilder theB) {
@@ -120,6 +127,8 @@ public class StopWatch {
b.append(": ");
b.append(formatMillis(delta));
}
+ } else {
+ b.append("No tasks");
}
TaskTiming last = null;
@@ -257,12 +266,11 @@ public class StopWatch {
*/
public void startTask(String theTaskName) {
endCurrentTask();
- if (isNotBlank(theTaskName)) {
- myCurrentTask = new TaskTiming()
- .setTaskName(theTaskName)
- .setStart(now());
- myTasks.add(myCurrentTask);
- }
+ Validate.notBlank(theTaskName, "Task name must not be blank");
+ myCurrentTask = new TaskTiming()
+ .setTaskName(theTaskName)
+ .setStart(now());
+ myTasks.add(myCurrentTask);
}
/**
@@ -331,18 +339,18 @@ public class StopWatch {
/**
* Append a right-aligned and zero-padded numeric value to a `StringBuilder`.
*/
- static private void append(StringBuilder tgt, String pfx, int dgt, long val) {
- tgt.append(pfx);
- if (dgt > 1) {
- int pad = (dgt - 1);
- for (long xa = val; xa > 9 && pad > 0; xa /= 10) {
+ static void appendRightAlignedNumber(StringBuilder theStringBuilder, String thePrefix, int theNumberOfDigits, long theValueToAppend) {
+ theStringBuilder.append(thePrefix);
+ if (theNumberOfDigits > 1) {
+ int pad = (theNumberOfDigits - 1);
+ for (long xa = theValueToAppend; xa > 9 && pad > 0; xa /= 10) {
pad--;
}
for (int xa = 0; xa < pad; xa++) {
- tgt.append('0');
+ theStringBuilder.append('0');
}
}
- tgt.append(val);
+ theStringBuilder.append(theValueToAppend);
}
/**
@@ -399,11 +407,11 @@ public class StopWatch {
}
} else {
long millisAsLong = (long) theMillis;
- append(buf, "", 2, ((millisAsLong % DateUtils.MILLIS_PER_DAY) / DateUtils.MILLIS_PER_HOUR));
- append(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_HOUR) / DateUtils.MILLIS_PER_MINUTE));
- append(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_MINUTE) / DateUtils.MILLIS_PER_SECOND));
+ appendRightAlignedNumber(buf, "", 2, ((millisAsLong % DateUtils.MILLIS_PER_DAY) / DateUtils.MILLIS_PER_HOUR));
+ appendRightAlignedNumber(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_HOUR) / DateUtils.MILLIS_PER_MINUTE));
+ appendRightAlignedNumber(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_MINUTE) / DateUtils.MILLIS_PER_SECOND));
if (theMillis <= DateUtils.MILLIS_PER_MINUTE) {
- append(buf, ".", 3, (millisAsLong % DateUtils.MILLIS_PER_SECOND));
+ appendRightAlignedNumber(buf, ".", 3, (millisAsLong % DateUtils.MILLIS_PER_SECOND));
}
}
return buf.toString();
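Reviewer note: appendRightAlignedNumber (the renamed append) left-pads the value with zeros to the requested digit width, which is what formatMillis relies on for HH:MM:SS.SSS output. A quick sketch of the expected behaviour; the method is package-private, so this assumes a caller in ca.uhn.fhir.util, as in the new tests:

    StringBuilder b = new StringBuilder();
    StopWatch.appendRightAlignedNumber(b, ":", 2, 7);   // appends ":07"
    StopWatch.appendRightAlignedNumber(b, ".", 3, 42);  // appends ".042"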
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SchemaBaseValidator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SchemaBaseValidator.java
index d3b74d695c9..bbbdf13d23d 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SchemaBaseValidator.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SchemaBaseValidator.java
@@ -19,26 +19,32 @@ package ca.uhn.fhir.validation;
* limitations under the License.
* #L%
*/
-import java.io.*;
-import java.nio.charset.Charset;
-import java.util.*;
-
-import javax.xml.XMLConstants;
-import javax.xml.transform.Source;
-import javax.xml.transform.stream.StreamSource;
-import javax.xml.validation.*;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.io.input.BOMInputStream;
-import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.w3c.dom.ls.LSInput;
-import org.w3c.dom.ls.LSResourceResolver;
-import org.xml.sax.*;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.input.BOMInputStream;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.w3c.dom.ls.LSInput;
+import org.w3c.dom.ls.LSResourceResolver;
+import org.xml.sax.SAXException;
+import org.xml.sax.SAXNotRecognizedException;
+import org.xml.sax.SAXParseException;
+
+import javax.xml.XMLConstants;
+import javax.xml.transform.Source;
+import javax.xml.transform.stream.StreamSource;
+import javax.xml.validation.Schema;
+import javax.xml.validation.SchemaFactory;
+import javax.xml.validation.Validator;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.StringReader;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
public class SchemaBaseValidator implements IValidatorModule {
public static final String RESOURCES_JAR_NOTE = "Note that as of HAPI FHIR 1.2, DSTU2 validation files are kept in a separate JAR (hapi-fhir-validation-resources-XXX.jar) which must be added to your classpath. See the HAPI FHIR download page for more information.";
@@ -47,7 +53,7 @@ public class SchemaBaseValidator implements IValidatorModule {
private static final Set<String> SCHEMA_NAMES;
static {
- HashSet<String> sn = new HashSet<String>();
+ HashSet<String> sn = new HashSet<>();
sn.add("xml.xsd");
sn.add("xhtml1-strict.xsd");
sn.add("fhir-single.xsd");
@@ -59,15 +65,15 @@ public class SchemaBaseValidator implements IValidatorModule {
SCHEMA_NAMES = Collections.unmodifiableSet(sn);
}
- private Map<String, Schema> myKeyToSchema = new HashMap<String, Schema>();
+ private final Map<String, Schema> myKeyToSchema = new HashMap<>();
private FhirContext myCtx;
public SchemaBaseValidator(FhirContext theContext) {
myCtx = theContext;
}
- private void doValidate(IValidationContext<?> theContext, String schemaName) {
- Schema schema = loadSchema("dstu", schemaName);
+ private void doValidate(IValidationContext<?> theContext) {
+ Schema schema = loadSchema();
+ Schema schema = loadSchema();
try {
Validator validator = schema.newValidator();
@@ -81,14 +87,14 @@ public class SchemaBaseValidator implements IValidatorModule {
}
try {
- /*
- * See https://github.com/jamesagnew/hapi-fhir/issues/339
- * https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
- */
+ /*
+ * See https://github.com/jamesagnew/hapi-fhir/issues/339
+ * https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
+ */
validator.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "");
validator.setProperty(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "");
- }catch (SAXNotRecognizedException ex){
- ourLog.warn("Jaxp 1.5 Support not found.",ex);
+ } catch (SAXNotRecognizedException ex) {
+ ourLog.warn("Jaxp 1.5 Support not found.", ex);
}
validator.validate(new StreamSource(new StringReader(encodedResource)));
@@ -99,17 +105,14 @@ public class SchemaBaseValidator implements IValidatorModule {
message.setMessage(e.getLocalizedMessage());
message.setSeverity(ResultSeverityEnum.FATAL);
theContext.addValidationMessage(message);
- } catch (SAXException e) {
- // Catch all
- throw new ConfigurationException("Could not load/parse schema file", e);
- } catch (IOException e) {
+ } catch (SAXException | IOException e) {
// Catch all
throw new ConfigurationException("Could not load/parse schema file", e);
}
}
- private Schema loadSchema(String theVersion, String theSchemaName) {
- String key = theVersion + "-" + theSchemaName;
+ private Schema loadSchema() {
+ String key = "fhir-single.xsd";
synchronized (myKeyToSchema) {
Schema schema = myKeyToSchema.get(key);
@@ -117,81 +120,52 @@ public class SchemaBaseValidator implements IValidatorModule {
return schema;
}
- Source baseSource = loadXml(null, theSchemaName);
+ Source baseSource = loadXml("fhir-single.xsd");
SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
schemaFactory.setResourceResolver(new MyResourceResolver());
try {
try {
- /*
- * See https://github.com/jamesagnew/hapi-fhir/issues/339
- * https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
- */
+ /*
+ * See https://github.com/jamesagnew/hapi-fhir/issues/339
+ * https://www.owasp.org/index.php/XML_External_Entity_(XXE)_Processing
+ */
schemaFactory.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "");
- }catch (SAXNotRecognizedException snex){
- ourLog.warn("Jaxp 1.5 Support not found.",snex);
+ } catch (SAXNotRecognizedException e) {
+ ourLog.warn("Jaxp 1.5 Support not found.", e);
}
- schema = schemaFactory.newSchema(new Source[] { baseSource });
+ schema = schemaFactory.newSchema(new Source[]{baseSource});
} catch (SAXException e) {
- throw new ConfigurationException("Could not load/parse schema file: " + theSchemaName, e);
+ throw new ConfigurationException("Could not load/parse schema file: " + "fhir-single.xsd", e);
}
myKeyToSchema.put(key, schema);
return schema;
}
}
- private Source loadXml(String theSystemId, String theSchemaName) {
+ Source loadXml(String theSchemaName) {
String pathToBase = myCtx.getVersion().getPathToSchemaDefinitions() + '/' + theSchemaName;
ourLog.debug("Going to load resource: {}", pathToBase);
- InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase);
- if (baseIs == null) {
- throw new InternalErrorException("Schema not found. " + RESOURCES_JAR_NOTE);
+ try (InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase)) {
+ if (baseIs == null) {
+ throw new InternalErrorException("Schema not found. " + RESOURCES_JAR_NOTE);
+ }
+ try (BOMInputStream bomInputStream = new BOMInputStream(baseIs, false)) {
+ try (InputStreamReader baseReader = new InputStreamReader(bomInputStream, StandardCharsets.UTF_8)) {
+ // Buffer so that we can close the input stream
+ String contents = IOUtils.toString(baseReader);
+ return new StreamSource(new StringReader(contents), null);
+ }
+ }
+ } catch (IOException e) {
+ throw new InternalErrorException(e);
}
- baseIs = new BOMInputStream(baseIs, false);
- InputStreamReader baseReader = new InputStreamReader(baseIs, Charset.forName("UTF-8"));
- Source baseSource = new StreamSource(baseReader, theSystemId);
- //FIXME resource leak
- return baseSource;
}
@Override
public void validateResource(IValidationContext<IBaseResource> theContext) {
- doValidate(theContext, "fhir-single.xsd");
- }
-
- private static class MyErrorHandler implements org.xml.sax.ErrorHandler {
-
- private IValidationContext<?> myContext;
-
- public MyErrorHandler(IValidationContext> theContext) {
- myContext = theContext;
- }
-
- private void addIssue(SAXParseException theException, ResultSeverityEnum theSeverity) {
- SingleValidationMessage message = new SingleValidationMessage();
- message.setLocationLine(theException.getLineNumber());
- message.setLocationCol(theException.getColumnNumber());
- message.setMessage(theException.getLocalizedMessage());
- message.setSeverity(theSeverity);
- myContext.addValidationMessage(message);
- }
-
- @Override
- public void error(SAXParseException theException) {
- addIssue(theException, ResultSeverityEnum.ERROR);
- }
-
- @Override
- public void fatalError(SAXParseException theException) {
- addIssue(theException, ResultSeverityEnum.FATAL);
- }
-
- @Override
- public void warning(SAXParseException theException) {
- addIssue(theException, ResultSeverityEnum.WARNING);
- }
-
+ doValidate(theContext);
}
private final class MyResourceResolver implements LSResourceResolver {
@@ -225,4 +199,38 @@ public class SchemaBaseValidator implements IValidatorModule {
}
}
+ private static class MyErrorHandler implements org.xml.sax.ErrorHandler {
+
+ private IValidationContext<?> myContext;
+
+ MyErrorHandler(IValidationContext> theContext) {
+ myContext = theContext;
+ }
+
+ private void addIssue(SAXParseException theException, ResultSeverityEnum theSeverity) {
+ SingleValidationMessage message = new SingleValidationMessage();
+ message.setLocationLine(theException.getLineNumber());
+ message.setLocationCol(theException.getColumnNumber());
+ message.setMessage(theException.getLocalizedMessage());
+ message.setSeverity(theSeverity);
+ myContext.addValidationMessage(message);
+ }
+
+ @Override
+ public void error(SAXParseException theException) {
+ addIssue(theException, ResultSeverityEnum.ERROR);
+ }
+
+ @Override
+ public void fatalError(SAXParseException theException) {
+ addIssue(theException, ResultSeverityEnum.FATAL);
+ }
+
+ @Override
+ public void warning(SAXParseException theException) {
+ addIssue(theException, ResultSeverityEnum.WARNING);
+ }
+
+ }
+
}
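Reviewer note: callers are unaffected — the module still validates against fhir-single.xsd, now loaded via try-with-resources so the classpath stream is closed (removing the old FIXME leak). A minimal wiring sketch, assuming the usual FhirValidator API from hapi-fhir-base:

    FhirContext ctx = FhirContext.forR4();
    FhirValidator validator = ctx.newValidator();
    validator.registerValidatorModule(new SchemaBaseValidator(ctx));
    ValidationResult outcome = validator.validateWithResult(resource); // resource is any IBaseResource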
diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
index 60980158197..84f8cee8412 100644
--- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
+++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
@@ -125,9 +125,9 @@ ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownPath=Unable to find content
ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownType=Content in resource of type {0} at path {1} is not appropriate for binary storage: {2}
-ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemUrl=Can not create multiple CodeSystem resources with CodeSystem.url "{0}", already have one with resource ID: {1}
-ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1}
-ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateValueSetUrl=Can not create multiple ValueSet resources with ValueSet.url "{0}", already have one with resource ID: {1}
-ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
+ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.cannotCreateDuplicateCodeSystemUrl=Can not create multiple CodeSystem resources with CodeSystem.url "{0}", already have one with resource ID: {1}
+ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1}
+ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.cannotCreateDuplicateValueSetUrl=Can not create multiple ValueSet resources with ValueSet.url "{0}", already have one with resource ID: {1}
+ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON patch to {0}: {1}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnumTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnumTest.java
new file mode 100644
index 00000000000..c491081ffc0
--- /dev/null
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnumTest.java
@@ -0,0 +1,35 @@
+package ca.uhn.fhir.model.api;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class ResourceMetadataKeyEnumTest {
+
+ @Test
+ public void testHashCode() {
+ assertEquals(-60968467, ResourceMetadataKeyEnum.PUBLISHED.hashCode());
+ }
+
+ @Test
+ public void testEquals() {
+ assertNotEquals(ResourceMetadataKeyEnum.PROFILES, null);
+ assertNotEquals(ResourceMetadataKeyEnum.PROFILES, "");
+ assertNotEquals(ResourceMetadataKeyEnum.PROFILES, ResourceMetadataKeyEnum.PUBLISHED);
+ assertEquals(ResourceMetadataKeyEnum.PROFILES, ResourceMetadataKeyEnum.PROFILES);
+ }
+
+
+ @Test
+ public void testExtensionResourceEquals() {
+ assertNotEquals(new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo"), new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://bar"));
+ assertNotEquals(new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo"), null);
+ assertNotEquals(new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo"), "");
+ assertEquals(new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo"), new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo"));
+
+ ResourceMetadataKeyEnum.ExtensionResourceMetadataKey foo = new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey("http://foo");
+ assertEquals(foo, foo);
+ }
+
+
+}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/model/api/TagTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/model/api/TagTest.java
new file mode 100644
index 00000000000..403c96dfe74
--- /dev/null
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/model/api/TagTest.java
@@ -0,0 +1,46 @@
+package ca.uhn.fhir.model.api;
+
+import org.junit.Test;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import static org.junit.Assert.*;
+
+public class TagTest {
+
+ @Test
+ public void testEquals() {
+ Tag tag1 = new Tag().setScheme("scheme").setTerm("term").setLabel("label");
+ Tag tag2 = new Tag().setScheme("scheme").setTerm("term").setLabel("label");
+ Tag tag3 = new Tag().setScheme("scheme2").setTerm("term").setLabel("label");
+ Tag tag4 = new Tag().setScheme("scheme").setTerm("term2").setLabel("label");
+
+ assertEquals(tag1, tag1);
+ assertEquals(tag1, tag2);
+ assertNotEquals(tag1, tag3);
+ assertNotEquals(tag1, tag4);
+ assertNotEquals(tag1, null);
+ assertNotEquals(tag1, "");
+ }
+
+ @Test
+ public void testHashCode() {
+ Tag tag1 = new Tag().setScheme("scheme").setTerm("term").setLabel("label");
+ assertEquals(1920714536, tag1.hashCode());
+ }
+
+ @Test
+ public void testConstructors() throws URISyntaxException {
+ assertTrue(new Tag().isEmpty());
+ assertFalse(new Tag("http://foo").isEmpty());
+ assertFalse(new Tag("http://foo", "http://bar").isEmpty());
+ assertFalse(new Tag(new URI("http://foo"), new URI("http://bar"), "Label").isEmpty());
+ assertTrue(new Tag((URI)null, null, "Label").isEmpty());
+
+ assertEquals("http://foo", new Tag(new URI("http://foo"), new URI("http://bar"), "Label").getSystem());
+ assertEquals("http://bar", new Tag(new URI("http://foo"), new URI("http://bar"), "Label").getCode());
+ assertEquals("Label", new Tag(new URI("http://foo"), new URI("http://bar"), "Label").getDisplay());
+ }
+
+}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/TokenParamTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/TokenParamTest.java
index 640a150e5ec..db934b0d89c 100644
--- a/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/TokenParamTest.java
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/rest/param/TokenParamTest.java
@@ -2,13 +2,34 @@ package ca.uhn.fhir.rest.param;
import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.*;
public class TokenParamTest {
@Test
public void testEquals() {
TokenParam tokenParam1 = new TokenParam("foo", "bar");
TokenParam tokenParam2 = new TokenParam("foo", "bar");
+ TokenParam tokenParam3 = new TokenParam("foo", "baz");
+ assertEquals(tokenParam1, tokenParam1);
assertEquals(tokenParam1, tokenParam2);
+ assertNotEquals(tokenParam1, tokenParam3);
+ assertNotEquals(tokenParam1, null);
+ assertNotEquals(tokenParam1, "");
}
+
+ @Test
+ public void testHashCode() {
+ TokenParam tokenParam1 = new TokenParam("foo", "bar");
+ assertEquals(4716638, tokenParam1.hashCode());
+ }
+
+
+ @Test
+ public void testIsEmpty() {
+ assertFalse(new TokenParam("foo", "bar").isEmpty());
+ assertTrue(new TokenParam("", "").isEmpty());
+ assertTrue(new TokenParam().isEmpty());
+ assertEquals("", new TokenParam().getValueNotNull());
+ }
+
}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/AsyncUtilTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/AsyncUtilTest.java
new file mode 100644
index 00000000000..7cbb8022c25
--- /dev/null
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/AsyncUtilTest.java
@@ -0,0 +1,62 @@
+package ca.uhn.fhir.util;
+
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.junit.Test;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.awaitility.Awaitility.await;
+
+public class AsyncUtilTest {
+
+ @Test
+ public void testSleep() {
+ AsyncUtil.sleep(10);
+ }
+
+ @Test
+ public void testSleepWithInterrupt() {
+ AtomicBoolean outcomeHolder = new AtomicBoolean(true);
+ Thread thread = new Thread(() -> {
+ boolean outcome = AsyncUtil.sleep(10000);
+ outcomeHolder.set(outcome);
+ });
+ thread.start();
+ thread.interrupt();
+ await().until(()-> outcomeHolder.get() == false);
+ }
+
+ @Test
+ public void testAwaitLatchAndThrowInternalErrorException() {
+ AtomicBoolean outcomeHolder = new AtomicBoolean(false);
+
+ CountDownLatch latch = new CountDownLatch(1);
+ Thread thread = new Thread(() -> {
+ try {
+ AsyncUtil.awaitLatchAndThrowInternalErrorExceptionOnInterrupt(latch, 10, TimeUnit.SECONDS);
+ } catch (InternalErrorException e) {
+ outcomeHolder.set(true);
+ }
+ });
+ thread.start();
+ thread.interrupt();
+ await().until(()-> outcomeHolder.get());
+ }
+
+ @Test
+ public void testAwaitLatchIgnoreInterruption() {
+ AtomicBoolean outcomeHolder = new AtomicBoolean(true);
+
+ CountDownLatch latch = new CountDownLatch(1);
+ Thread thread = new Thread(() -> {
+ boolean outcome = AsyncUtil.awaitLatchAndIgnoreInterrupt(latch, 10, TimeUnit.SECONDS);
+ outcomeHolder.set(outcome);
+ });
+ thread.start();
+ thread.interrupt();
+ await().until(()-> outcomeHolder.get() == false);
+ }
+
+}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/StopWatchTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/StopWatchTest.java
index 192c47f6b0e..dc79e0cdb66 100644
--- a/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/StopWatchTest.java
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/StopWatchTest.java
@@ -128,6 +128,56 @@ public class StopWatchTest {
assertEquals("TASK1: 500ms\nTASK2: 100ms", taskDurations);
}
+ @Test
+ public void testFormatTaskDurationsDelayBetweenTasks() {
+ StopWatch sw = new StopWatch();
+
+ StopWatch.setNowForUnitTestForUnitTest(1000L);
+ sw.startTask("TASK1");
+
+ StopWatch.setNowForUnitTestForUnitTest(1500L);
+ sw.endCurrentTask();
+
+ StopWatch.setNowForUnitTestForUnitTest(2000L);
+ sw.startTask("TASK2");
+
+ StopWatch.setNowForUnitTestForUnitTest(2100L);
+ sw.endCurrentTask();
+
+ StopWatch.setNowForUnitTestForUnitTest(2200L);
+ String taskDurations = sw.formatTaskDurations();
+ ourLog.info(taskDurations);
+ assertEquals("TASK1: 500ms\n" +
+ "Between: 500ms\n" +
+ "TASK2: 100ms\n" +
+ "After last task: 100ms", taskDurations);
+ }
+
+ @Test
+ public void testFormatTaskDurationsLongDelayBeforeStart() {
+ StopWatch sw = new StopWatch(0);
+
+ StopWatch.setNowForUnitTestForUnitTest(1000L);
+ sw.startTask("TASK1");
+
+ StopWatch.setNowForUnitTestForUnitTest(1500L);
+ sw.startTask("TASK2");
+
+ StopWatch.setNowForUnitTestForUnitTest(1600L);
+ String taskDurations = sw.formatTaskDurations();
+ ourLog.info(taskDurations);
+ assertEquals("Before first task: 1000ms\nTASK1: 500ms\nTASK2: 100ms", taskDurations);
+ }
+
+ @Test
+ public void testFormatTaskDurationsNoTasks() {
+ StopWatch sw = new StopWatch(0);
+
+ String taskDurations = sw.formatTaskDurations();
+ ourLog.info(taskDurations);
+ assertEquals("No tasks", taskDurations);
+ }
+
@Test
public void testFormatThroughput60Ops4Min() {
StopWatch sw = new StopWatch(DateUtils.addMinutes(new Date(), -4));
@@ -210,4 +260,34 @@ public class StopWatchTest {
assertThat(string, matchesPattern("^[0-9]{3,4}ms$"));
}
+
+ @Test
+ public void testAppendRightAlignedNumber() {
+ StringBuilder b= new StringBuilder();
+
+ b.setLength(0);
+ StopWatch.appendRightAlignedNumber(b, "PFX", 0, 100);
+ assertEquals("PFX100", b.toString());
+
+ b.setLength(0);
+ StopWatch.appendRightAlignedNumber(b, "PFX", 1, 100);
+ assertEquals("PFX100", b.toString());
+
+ b.setLength(0);
+ StopWatch.appendRightAlignedNumber(b, "PFX", 2, 100);
+ assertEquals("PFX100", b.toString());
+
+ b.setLength(0);
+ StopWatch.appendRightAlignedNumber(b, "PFX", 3, 100);
+ assertEquals("PFX100", b.toString());
+
+ b.setLength(0);
+ StopWatch.appendRightAlignedNumber(b, "PFX", 4, 100);
+ assertEquals("PFX0100", b.toString());
+
+ b.setLength(0);
+ StopWatch.appendRightAlignedNumber(b, "PFX", 10, 100);
+ assertEquals("PFX0000000100", b.toString());
+ }
+
}
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ToggleSearchParametersCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ToggleSearchParametersCommand.java
index 4aa3991980a..fbfdcf0e82b 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ToggleSearchParametersCommand.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ToggleSearchParametersCommand.java
@@ -20,7 +20,7 @@ package ca.uhn.fhir.cli;
* #L%
*/
-import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
@@ -44,7 +44,7 @@ public class ToggleSearchParametersCommand extends BaseCommand {
Options options = new Options();
addFhirVersionOption(options);
addBaseUrlOption(options);
- addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + IHapiTerminologyLoaderSvc.SCT_URI + ")");
+ addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + ITermLoaderSvc.SCT_URI + ")");
addBasicAuthOption(options);
return options;
}
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java
index 2194c4cff91..42a0aa66143 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java
@@ -20,35 +20,32 @@ package ca.uhn.fhir.cli;
* #L%
*/
-import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
-import ca.uhn.fhir.jpa.entity.TermConcept;
-import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
-import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
-import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
+import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
+import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
+import ca.uhn.fhir.util.AttachmentUtil;
import ca.uhn.fhir.util.ParametersUtil;
+import com.google.common.base.Charsets;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.instance.model.api.IBaseParameters;
-import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.instance.model.api.ICompositeType;
-import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.stream.Collectors;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
import static org.apache.commons.lang3.StringUtils.isBlank;
public class UploadTerminologyCommand extends BaseCommand {
- public static final String UPLOAD_TERMINOLOGY = "upload-terminology";
+ static final String UPLOAD_TERMINOLOGY = "upload-terminology";
// TODO: Don't use qualified names for loggers in HAPI CLI.
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(UploadTerminologyCommand.class);
@@ -68,9 +65,8 @@ public class UploadTerminologyCommand extends BaseCommand {
addFhirVersionOption(options);
addBaseUrlOption(options);
- addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + IHapiTerminologyLoaderSvc.SCT_URI + ")");
+ addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + ITermLoaderSvc.SCT_URI + ")");
addOptionalOption(options, "d", "data", true, "Local file to use to upload (can be a raw file or a ZIP containing the raw file)");
- addOptionalOption(options, null, "custom", false, "Indicates that this upload uses the HAPI FHIR custom external terminology format");
addOptionalOption(options, "m", "mode", true, "The upload mode: SNAPSHOT (default), ADD, REMOVE");
addBasicAuthOption(options);
addVerboseLoggingOption(options);
@@ -109,104 +105,86 @@ public class UploadTerminologyCommand extends BaseCommand {
switch (mode) {
case SNAPSHOT:
- uploadSnapshot(inputParameters, termUrl, datafile, theCommandLine, client);
+ invokeOperation(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM);
break;
case ADD:
- uploadDelta(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, false);
+ invokeOperation(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD);
break;
case REMOVE:
- uploadDelta(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, true);
+ invokeOperation(theCommandLine, termUrl, datafile, client, inputParameters, JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE);
break;
}
}
- private void uploadDelta(CommandLine theCommandLine, String theTermUrl, String[] theDatafile, IGenericClient theClient, IBaseParameters theInputParameters, String theOperationName, boolean theFlatten) {
- ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputParameters, "url", theTermUrl);
+ private void invokeOperation(CommandLine theCommandLine, String theTermUrl, String[] theDatafile, IGenericClient theClient, IBaseParameters theInputParameters, String theOperationName) throws ParseException {
+ ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_SYSTEM, theTermUrl);
- List<IHapiTerminologyLoaderSvc.FileDescriptor> fileDescriptors = new ArrayList<>();
+ ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+ ZipOutputStream zipOutputStream = new ZipOutputStream(byteArrayOutputStream, Charsets.UTF_8);
+ boolean haveCompressedContents = false;
+ try {
+ for (String nextDataFile : theDatafile) {
+
+ try (FileInputStream fileInputStream = new FileInputStream(nextDataFile)) {
+ if (!nextDataFile.endsWith(".zip")) {
+
+ ourLog.info("Compressing and adding file: {}", nextDataFile);
+ ZipEntry nextEntry = new ZipEntry(stripPath(nextDataFile));
+ zipOutputStream.putNextEntry(nextEntry);
+
+ IOUtils.copy(fileInputStream, zipOutputStream);
+ haveCompressedContents = true;
+
+ zipOutputStream.flush();
+ ourLog.info("Finished compressing {} into {}", nextEntry.getSize(), nextEntry.getCompressedSize());
+
+ } else {
+
+ ourLog.info("Adding file: {}", nextDataFile);
+ ICompositeType attachment = AttachmentUtil.newInstance(myFhirCtx);
+ AttachmentUtil.setUrl(myFhirCtx, attachment, "file:" + nextDataFile);
+ AttachmentUtil.setData(myFhirCtx, attachment, IOUtils.toByteArray(fileInputStream));
+ ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_FILE, attachment);
- for (String next : theDatafile) {
- try (FileInputStream inputStream = new FileInputStream(next)) {
- byte[] bytes = IOUtils.toByteArray(inputStream);
- fileDescriptors.add(new IHapiTerminologyLoaderSvc.FileDescriptor() {
- @Override
- public String getFilename() {
- return next;
}
+ }
- @Override
- public InputStream getInputStream() {
- return new ByteArrayInputStream(bytes);
- }
- });
- } catch (IOException e) {
- throw new CommandFailureException("Failed to read from file \"" + next + "\": " + e.getMessage());
}
+ zipOutputStream.flush();
+ zipOutputStream.close();
+ } catch (IOException e) {
+ throw new ParseException(e.toString());
}
- TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
- TerminologyLoaderSvcImpl.LoadedFileDescriptors descriptors = new TerminologyLoaderSvcImpl.LoadedFileDescriptors(fileDescriptors);
- TerminologyLoaderSvcImpl.processCustomTerminologyFiles(descriptors, codeSystemVersion);
-
- CodeSystem codeSystem = new CodeSystem();
- codeSystem.setUrl(theTermUrl);
- addCodesToCodeSystem(codeSystemVersion.getConcepts(), codeSystem.getConcept(), theFlatten);
-
- ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, "value", codeSystem);
-
- if (theCommandLine.hasOption("custom")) {
- ParametersUtil.addParameterToParametersCode(myFhirCtx, theInputParameters, "contentMode", "custom");
+ if (haveCompressedContents) {
+ ICompositeType attachment = AttachmentUtil.newInstance(myFhirCtx);
+ AttachmentUtil.setUrl(myFhirCtx, attachment, "file:/files.zip");
+ AttachmentUtil.setData(myFhirCtx, attachment, byteArrayOutputStream.toByteArray());
+ ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_FILE, attachment);
}
ourLog.info("Beginning upload - This may take a while...");
- IBaseParameters response = theClient
- .operation()
- .onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass())
- .named(theOperationName)
- .withParameters(theInputParameters)
- .execute();
+ if (ourLog.isDebugEnabled() || "true".equals(System.getProperty("test"))) {
+ ourLog.info("Submitting parameters: {}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(theInputParameters));
+ }
- ourLog.info("Upload complete!");
- ourLog.info("Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response));
- }
-
- private void addCodesToCodeSystem(Collection<TermConcept> theSourceConcepts, List<CodeSystem.ConceptDefinitionComponent> theTargetConcept, boolean theFlatten) {
- for (TermConcept nextSourceConcept : theSourceConcepts) {
-
- CodeSystem.ConceptDefinitionComponent nextTarget = new CodeSystem.ConceptDefinitionComponent();
- nextTarget.setCode(nextSourceConcept.getCode());
- nextTarget.setDisplay(nextSourceConcept.getDisplay());
- theTargetConcept.add(nextTarget);
-
- List<TermConcept> children = nextSourceConcept.getChildren().stream().map(t -> t.getChild()).collect(Collectors.toList());
- if (theFlatten) {
- addCodesToCodeSystem(children, theTargetConcept, theFlatten);
- } else {
- addCodesToCodeSystem(children, nextTarget.getConcept(), theFlatten);
+ IBaseParameters response;
+ try {
+ response = theClient
+ .operation()
+ .onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass())
+ .named(theOperationName)
+ .withParameters(theInputParameters)
+ .execute();
+ } catch (BaseServerResponseException e) {
+ if (e.getOperationOutcome() != null) {
+ ourLog.error("Received the following response:\n{}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()));
}
-
- }
- }
-
- private void uploadSnapshot(IBaseParameters theInputparameters, String theTermUrl, String[] theDatafile, CommandLine theCommandLine, IGenericClient theClient) {
- ParametersUtil.addParameterToParametersUri(myFhirCtx, theInputparameters, "url", theTermUrl);
- for (String next : theDatafile) {
- ParametersUtil.addParameterToParametersString(myFhirCtx, theInputparameters, "localfile", next);
- }
- if (theCommandLine.hasOption("custom")) {
- ParametersUtil.addParameterToParametersCode(myFhirCtx, theInputparameters, "contentMode", "custom");
+ throw e;
}
- ourLog.info("Beginning upload - This may take a while...");
-
- IBaseParameters response = theClient
- .operation()
- .onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass())
- .named(JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM)
- .withParameters(theInputparameters)
- .execute();
ourLog.info("Upload complete!");
ourLog.info("Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response));
@@ -216,4 +194,12 @@ public class UploadTerminologyCommand extends BaseCommand {
SNAPSHOT, ADD, REMOVE
}
+ public static String stripPath(String thePath) {
+ String retVal = thePath;
+ if (retVal.contains("/")) {
+ retVal = retVal.substring(retVal.lastIndexOf("/"));
+ }
+ return retVal;
+ }
+
}
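Reviewer note: the command no longer builds a CodeSystem client-side; plain input files are streamed into one in-memory ZIP and shipped as an Attachment in the PARAM_FILE parameter, while files already ending in .zip are attached as-is. A hedged sketch of the packaging pattern used above (file names are illustrative):

    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ZipOutputStream zip = new ZipOutputStream(bytes, Charsets.UTF_8)) {
        for (String file : new String[]{"concepts.csv", "hierarchy.csv"}) {
            zip.putNextEntry(new ZipEntry(file));
            Files.copy(Paths.get(file), zip);   // copy the raw CSV into the archive entry
            zip.closeEntry();
        }
    }
    // bytes.toByteArray() becomes the Attachment data passed via TerminologyUploaderProvider.PARAM_FILE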
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/resources/logback-cli-on.xml b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/resources/logback-cli-on.xml
index 848186fb3b1..61dc270245e 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/resources/logback-cli-on.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/resources/logback-cli-on.xml
@@ -34,7 +34,7 @@
-
+
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java
index 42c8eccd95a..568f9e49557 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/UploadTerminologyCommandTest.java
@@ -1,18 +1,20 @@
package ca.uhn.fhir.cli;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.BaseTest;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
-import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
+import ca.uhn.fhir.jpa.term.UploadStatistics;
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
+import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.test.utilities.JettyUtil;
+import com.google.common.base.Charsets;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hamcrest.Matchers;
-import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.IdType;
import org.junit.After;
import org.junit.Before;
@@ -25,15 +27,19 @@ import org.mockito.junit.MockitoJUnitRunner;
import java.io.*;
import java.util.List;
-import java.util.concurrent.atomic.AtomicInteger;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
-import static org.junit.Assert.*;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.matchesPattern;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;
@RunWith(MockitoJUnitRunner.class)
-public class UploadTerminologyCommandTest {
+public class UploadTerminologyCommandTest extends BaseTest {
static {
System.setProperty("test", "true");
@@ -42,26 +48,24 @@ public class UploadTerminologyCommandTest {
private Server myServer;
private FhirContext myCtx = FhirContext.forR4();
@Mock
- private IHapiTerminologyLoaderSvc myTerminologyLoaderSvc;
- @Mock
- private IHapiTerminologySvc myTerminologySvc;
+ private ITermLoaderSvc myTermLoaderSvc;
@Captor
- private ArgumentCaptor<List<IHapiTerminologyLoaderSvc.FileDescriptor>> myDescriptorList;
- @Captor
- private ArgumentCaptor<CodeSystem> myCodeSystemCaptor;
+ private ArgumentCaptor<List<ITermLoaderSvc.FileDescriptor>> myDescriptorListCaptor;
private int myPort;
private String myConceptsFileName = "target/concepts.csv";
private String myHierarchyFileName = "target/hierarchy.csv";
private File myConceptsFile = new File(myConceptsFileName);
private File myHierarchyFile = new File(myHierarchyFileName);
+ private File myArchiveFile;
+ private String myArchiveFileName;
@Test
- public void testTerminologyUpload_AddDelta() throws IOException {
+ public void testAddDelta() throws IOException {
writeConceptAndHierarchyFiles();
- when(myTerminologySvc.applyDeltaCodesystemsAdd(eq("http://foo"), any(), any())).thenReturn(new AtomicInteger(100));
+ when(myTermLoaderSvc.loadDeltaAdd(eq("http://foo"), anyList(), any())).thenReturn(new UploadStatistics(100, new IdType("CodeSystem/101")));
App.main(new String[]{
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
@@ -73,25 +77,69 @@ public class UploadTerminologyCommandTest {
"-d", myHierarchyFileName
});
- verify(myTerminologySvc, times(1)).applyDeltaCodesystemsAdd(any(), isNull(), myCodeSystemCaptor.capture());
+ verify(myTermLoaderSvc, times(1)).loadDeltaAdd(eq("http://foo"), myDescriptorListCaptor.capture(), any());
- CodeSystem codeSystem = myCodeSystemCaptor.getValue();
- assertEquals(1, codeSystem.getConcept().size());
- assertEquals("http://foo", codeSystem.getUrl());
- assertEquals("ANIMALS", codeSystem.getConcept().get(0).getCode());
- assertEquals("Animals", codeSystem.getConcept().get(0).getDisplay());
- assertEquals(2, codeSystem.getConcept().get(0).getConcept().size());
- assertEquals("CATS", codeSystem.getConcept().get(0).getConcept().get(0).getCode());
- assertEquals("Cats", codeSystem.getConcept().get(0).getConcept().get(0).getDisplay());
- assertEquals("DOGS", codeSystem.getConcept().get(0).getConcept().get(1).getCode());
- assertEquals("Dogs", codeSystem.getConcept().get(0).getConcept().get(1).getDisplay());
+ List<ITermLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorListCaptor.getValue();
+ assertEquals(1, listOfDescriptors.size());
+ assertEquals("file:/files.zip", listOfDescriptors.get(0).getFilename());
+ assertThat(IOUtils.toByteArray(listOfDescriptors.get(0).getInputStream()).length, greaterThan(100));
}
@Test
- public void testTerminologyUpload_RemoveDelta() throws IOException {
+ public void testAddDeltaUsingCompressedFile() throws IOException {
+
+ writeConceptAndHierarchyFiles();
+ writeArchiveFile(myConceptsFile, myHierarchyFile);
+
+ when(myTermLoaderSvc.loadDeltaAdd(eq("http://foo"), anyList(), any())).thenReturn(new UploadStatistics(100, new IdType("CodeSystem/101")));
+
+ App.main(new String[]{
+ UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
+ "-v", "r4",
+ "-m", "ADD",
+ "-t", "http://localhost:" + myPort,
+ "-u", "http://foo",
+ "-d", myArchiveFileName
+ });
+
+ verify(myTermLoaderSvc, times(1)).loadDeltaAdd(eq("http://foo"), myDescriptorListCaptor.capture(), any());
+
+ List<ITermLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorListCaptor.getValue();
+ assertEquals(1, listOfDescriptors.size());
+ assertThat(listOfDescriptors.get(0).getFilename(), matchesPattern("^file:.*temp.*\\.zip$"));
+ assertThat(IOUtils.toByteArray(listOfDescriptors.get(0).getInputStream()).length, greaterThan(100));
+ }
+
+ private void writeArchiveFile(File... theFiles) throws IOException {
+ ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+ ZipOutputStream zipOutputStream = new ZipOutputStream(byteArrayOutputStream, Charsets.UTF_8);
+
+ for (File next : theFiles) {
+ ZipEntry nextEntry = new ZipEntry(UploadTerminologyCommand.stripPath(next.getAbsolutePath()));
+ zipOutputStream.putNextEntry(nextEntry);
+
+ try (FileInputStream fileInputStream = new FileInputStream(next)) {
+ IOUtils.copy(fileInputStream, zipOutputStream);
+ }
+
+ }
+
+ zipOutputStream.flush();
+ zipOutputStream.close();
+
+ myArchiveFile = File.createTempFile("temp", ".zip");
+ myArchiveFile.deleteOnExit();
+ myArchiveFileName = myArchiveFile.getAbsolutePath();
+ try (FileOutputStream fos = new FileOutputStream(myArchiveFile, false)) {
+ fos.write(byteArrayOutputStream.toByteArray());
+ }
+ }
+
+ @Test
+ public void testRemoveDelta() throws IOException {
writeConceptAndHierarchyFiles();
- when(myTerminologySvc.applyDeltaCodesystemsRemove(eq("http://foo"), any())).thenReturn(new AtomicInteger(100));
+ when(myTermLoaderSvc.loadDeltaRemove(eq("http://foo"), anyList(), any())).thenReturn(new UploadStatistics(100, new IdType("CodeSystem/101")));
App.main(new String[]{
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
@@ -103,46 +151,38 @@ public class UploadTerminologyCommandTest {
"-d", myHierarchyFileName
});
- verify(myTerminologySvc, times(1)).applyDeltaCodesystemsRemove(any(), myCodeSystemCaptor.capture());
+ verify(myTermLoaderSvc, times(1)).loadDeltaRemove(eq("http://foo"), myDescriptorListCaptor.capture(), any());
+
+ List<ITermLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorListCaptor.getValue();
+ assertEquals(1, listOfDescriptors.size());
+ assertEquals("file:/files.zip", listOfDescriptors.get(0).getFilename());
+ assertThat(IOUtils.toByteArray(listOfDescriptors.get(0).getInputStream()).length, greaterThan(100));
- CodeSystem codeSystem = myCodeSystemCaptor.getValue();
- assertEquals(3, codeSystem.getConcept().size());
- assertEquals("http://foo", codeSystem.getUrl());
- assertEquals("ANIMALS", codeSystem.getConcept().get(0).getCode());
- assertEquals("Animals", codeSystem.getConcept().get(0).getDisplay());
- assertEquals("CATS", codeSystem.getConcept().get(1).getCode());
- assertEquals("Cats", codeSystem.getConcept().get(1).getDisplay());
- assertEquals("DOGS", codeSystem.getConcept().get(2).getCode());
- assertEquals("Dogs", codeSystem.getConcept().get(2).getDisplay());
}
@Test
- public void testTerminologyUpload_Snapshot() throws IOException {
+ public void testSnapshot() throws IOException {
writeConceptAndHierarchyFiles();
- when(myTerminologyLoaderSvc.loadCustom(eq("http://foo"), any(), any())).thenReturn(new IHapiTerminologyLoaderSvc.UploadStatistics(100, new IdType("CodeSystem/123")));
+ when(myTermLoaderSvc.loadCustom(any(), anyList(), any())).thenReturn(new UploadStatistics(100, new IdType("CodeSystem/101")));
App.main(new String[]{
UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
"-v", "r4",
"-m", "SNAPSHOT",
- "--custom",
"-t", "http://localhost:" + myPort,
"-u", "http://foo",
"-d", myConceptsFileName,
"-d", myHierarchyFileName
});
- verify(myTerminologyLoaderSvc, times(1)).loadCustom(any(), myDescriptorList.capture(), any());
+ verify(myTermLoaderSvc, times(1)).loadCustom(any(), myDescriptorListCaptor.capture(), any());
- List<IHapiTerminologyLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorList.getValue();
- assertEquals(2, listOfDescriptors.size());
-
- assertThat(listOfDescriptors.get(0).getFilename(), Matchers.endsWith("concepts.csv"));
- assertInputStreamEqualsFile(myConceptsFile, listOfDescriptors.get(0).getInputStream());
- assertThat(listOfDescriptors.get(1).getFilename(), Matchers.endsWith("hierarchy.csv"));
- assertInputStreamEqualsFile(myHierarchyFile, listOfDescriptors.get(1).getInputStream());
+ List<ITermLoaderSvc.FileDescriptor> listOfDescriptors = myDescriptorListCaptor.getValue();
+ assertEquals(1, listOfDescriptors.size());
+ assertEquals("file:/files.zip", listOfDescriptors.get(0).getFilename());
+ assertThat(IOUtils.toByteArray(listOfDescriptors.get(0).getInputStream()).length, greaterThan(100));
}
@@ -161,27 +201,41 @@ public class UploadTerminologyCommandTest {
}
}
- private void assertInputStreamEqualsFile(File theExpectedFile, InputStream theActualInputStream) throws IOException {
- try (FileInputStream fis = new FileInputStream(theExpectedFile)) {
- byte[] expectedBytes = IOUtils.toByteArray(fis);
- byte[] actualBytes = IOUtils.toByteArray(theActualInputStream);
- assertArrayEquals(expectedBytes, actualBytes);
+ @Test
+ public void testAddInvalidFileName() throws IOException {
+
+ writeConceptAndHierarchyFiles();
+
+ try {
+ App.main(new String[]{
+ UploadTerminologyCommand.UPLOAD_TERMINOLOGY,
+ "-v", "r4",
+ "-m", "ADD",
+ "-t", "http://localhost:" + myPort,
+ "-u", "http://foo",
+ "-d", myConceptsFileName + "/foo.csv",
+ "-d", myHierarchyFileName
+ });
+ } catch (Error e) {
+ assertThat(e.toString(), Matchers.containsString("FileNotFoundException: target/concepts.csv/foo.csv"));
}
}
+
@After
public void after() throws Exception {
JettyUtil.closeServer(myServer);
FileUtils.deleteQuietly(myConceptsFile);
FileUtils.deleteQuietly(myHierarchyFile);
+ FileUtils.deleteQuietly(myArchiveFile);
}
@Before
- public void start() throws Exception {
+ public void before() throws Exception {
myServer = new Server(0);
- TerminologyUploaderProvider provider = new TerminologyUploaderProvider(myCtx, myTerminologyLoaderSvc, myTerminologySvc);
+ TerminologyUploaderProvider provider = new TerminologyUploaderProvider(myCtx, myTermLoaderSvc);
ServletHandler proxyHandler = new ServletHandler();
RestfulServer servlet = new RestfulServer(myCtx);
diff --git a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulResponse.java b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulResponse.java
index 9aa1b688369..9f6fd70abe4 100644
--- a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulResponse.java
+++ b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulResponse.java
@@ -51,23 +51,19 @@ public class OkHttpRestfulResponse extends BaseHttpResponse implements IHttpResp
this.myResponse = theResponse;
}
- @Override
- public void bufferEntitity() throws IOException {
- bufferEntity();
- }
-
@Override
public void bufferEntity() throws IOException {
if (myEntityBuffered) {
return;
}
- InputStream responseEntity = readEntity();
- if (responseEntity != null) {
- myEntityBuffered = true;
- try {
- myEntityBytes = IOUtils.toByteArray(responseEntity);
- } catch (IllegalStateException e) {
- throw new InternalErrorException(e);
+ try (InputStream responseEntity = readEntity()) {
+ if (responseEntity != null) {
+ myEntityBuffered = true;
+ try {
+ myEntityBytes = IOUtils.toByteArray(responseEntity);
+ } catch (IllegalStateException e) {
+ throw new InternalErrorException(e);
+ }
}
}
}
diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpResponse.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpResponse.java
index 1ec72030ba8..da0f1c17f17 100644
--- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpResponse.java
+++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpResponse.java
@@ -21,6 +21,7 @@ package ca.uhn.fhir.rest.client.apache;
*/
import java.io.*;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.*;
import ca.uhn.fhir.rest.client.impl.BaseHttpResponse;
@@ -53,25 +54,19 @@ public class ApacheHttpResponse extends BaseHttpResponse implements IHttpRespons
this.myResponse = theResponse;
}
- @Deprecated // override deprecated method
- @Override
- public void bufferEntitity() throws IOException {
- bufferEntity();
- }
-
@Override
public void bufferEntity() throws IOException {
if (myEntityBuffered) {
return;
}
- InputStream respEntity = readEntity();
- if (respEntity != null) {
- this.myEntityBuffered = true;
- try {
- this.myEntityBytes = IOUtils.toByteArray(respEntity);
- } catch (IllegalStateException e) {
- // FIXME resouce leak
- throw new InternalErrorException(e);
+ try (InputStream respEntity = readEntity()) {
+ if (respEntity != null) {
+ this.myEntityBuffered = true;
+ try {
+ this.myEntityBytes = IOUtils.toByteArray(respEntity);
+ } catch (IllegalStateException e) {
+ throw new InternalErrorException(e);
+ }
}
}
}
@@ -103,7 +98,7 @@ public class ApacheHttpResponse extends BaseHttpResponse implements IHttpRespons
if (Constants.STATUS_HTTP_204_NO_CONTENT != myResponse.getStatusLine().getStatusCode()) {
ourLog.debug("Response did not specify a charset, defaulting to utf-8");
}
- charset = Charset.forName("UTF-8");
+ charset = StandardCharsets.UTF_8;
}
return new InputStreamReader(readEntity(), charset);
@@ -115,11 +110,7 @@ public class ApacheHttpResponse extends BaseHttpResponse implements IHttpRespons
if (myResponse.getAllHeaders() != null) {
for (Header next : myResponse.getAllHeaders()) {
String name = next.getName().toLowerCase();
- List<String> list = headers.get(name);
- if (list == null) {
- list = new ArrayList<>();
- headers.put(name, list);
- }
+ List<String> list = headers.computeIfAbsent(name, k -> new ArrayList<>());
list.add(next.getValue());
}
diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java
index d46fdaf6b0b..a7952a8b6e2 100644
--- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java
+++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.rest.client.interceptor;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -94,9 +95,7 @@ public class LoggingInterceptor implements IClientInterceptor {
if (content != null) {
myLog.info("Client request body:\n{}", content);
}
- } catch (IllegalStateException e) {
- myLog.warn("Failed to replay request contents (during logging attempt, actual FHIR call did not fail)", e);
- } catch (IOException e) {
+ } catch (IllegalStateException | IOException e) {
myLog.warn("Failed to replay request contents (during logging attempt, actual FHIR call did not fail)", e);
}
}
@@ -147,11 +146,8 @@ public class LoggingInterceptor implements IClientInterceptor {
}
if (myLogResponseBody) {
- //TODO: Use of a deprecated method should be resolved.
- theResponse.bufferEntitity();
- InputStream respEntity = null;
- try {
- respEntity = theResponse.readEntity();
+ theResponse.bufferEntity();
+ try (InputStream respEntity = theResponse.readEntity()) {
if (respEntity != null) {
final byte[] bytes;
try {
@@ -159,12 +155,10 @@ public class LoggingInterceptor implements IClientInterceptor {
} catch (IllegalStateException e) {
throw new InternalErrorException(e);
}
- myLog.info("Client response body:\n{}", new String(bytes, "UTF-8"));
+ myLog.info("Client response body:\n{}", new String(bytes, StandardCharsets.UTF_8));
} else {
myLog.info("Client response body: (none)");
}
- } finally {
- IOUtils.closeQuietly(respEntity);
}
}
}
@@ -178,7 +172,9 @@ public class LoggingInterceptor implements IClientInterceptor {
Iterator<String> values = theHeaders.get(key).iterator();
while(values.hasNext()) {
String value = values.next();
- b.append(key + ": " + value);
+ b.append(key);
+ b.append(": ");
+ b.append(value);
if (nameEntries.hasNext() || values.hasNext()) {
b.append('\n');
}
diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpResponse.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpResponse.java
index f29c0ca9037..1f1479507a7 100644
--- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpResponse.java
+++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpResponse.java
@@ -51,11 +51,6 @@ public class JaxRsHttpResponse extends BaseHttpResponse implements IHttpResponse
this.myResponse = theResponse;
}
- @Override
- public void bufferEntitity() throws IOException {
- bufferEntity();
- }
-
@Override
public void bufferEntity() throws IOException {
if(!myBufferedEntity && myResponse.hasEntity()) {
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index 1ea96f82d6a..50aabaec8dc 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -46,7 +46,6 @@
org.apache.commons
commons-csv
- 1.3
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/BaseTest.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/BaseTest.java
new file mode 100644
index 00000000000..fae18f88d9c
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/BaseTest.java
@@ -0,0 +1,19 @@
+package ca.uhn.fhir.jpa;
+
+import com.google.common.base.Charsets;
+import org.apache.commons.io.IOUtils;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+public class BaseTest {
+
+ protected String loadResource(String theClasspath) throws IOException {
+ InputStream stream = BaseTest.class.getResourceAsStream(theClasspath);
+ if (stream==null) {
+ throw new IllegalArgumentException("Unable to find resource: " + theClasspath);
+ }
+ return IOUtils.toString(stream, Charsets.UTF_8);
+ }
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
index 8545b5f05cb..ecd496d08ef 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
@@ -32,6 +32,13 @@ import ca.uhn.fhir.jpa.subscription.module.cache.LinkedBlockingQueueSubscribable
import ca.uhn.fhir.jpa.subscription.module.channel.ISubscribableChannelFactory;
import ca.uhn.fhir.jpa.subscription.module.matcher.ISubscriptionMatcher;
import ca.uhn.fhir.jpa.subscription.module.matcher.InMemorySubscriptionMatcher;
+import ca.uhn.fhir.jpa.term.TermCodeSystemStorageSvcImpl;
+import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
+import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
+import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
+import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
+import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
+import ca.uhn.fhir.jpa.term.api.ITermReindexingSvc;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices;
@@ -256,7 +263,6 @@ public abstract class BaseConfig {
}
-
public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theCtx) {
theFactory.setJpaDialect(hibernateJpaDialect(theCtx.getLocalizer()));
theFactory.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfigDstu3Plus.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfigDstu3Plus.java
new file mode 100644
index 00000000000..6a63d4093d0
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfigDstu3Plus.java
@@ -0,0 +1,34 @@
+package ca.uhn.fhir.jpa.config;
+
+import ca.uhn.fhir.jpa.term.TermCodeSystemStorageSvcImpl;
+import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
+import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
+import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
+import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
+import ca.uhn.fhir.jpa.term.api.ITermReindexingSvc;
+import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public abstract class BaseConfigDstu3Plus extends BaseConfig {
+
+ @Bean
+ public ITermCodeSystemStorageSvc termCodeSystemStorageSvc() {
+ return new TermCodeSystemStorageSvcImpl();
+ }
+
+ @Bean
+ public ITermDeferredStorageSvc termDeferredStorageSvc() {
+ return new TermDeferredStorageSvcImpl();
+ }
+
+ @Bean
+ public ITermReindexingSvc termReindexingSvc() {
+ return new TermReindexingSvcImpl();
+ }
+
+ @Bean
+ public abstract ITermVersionAdapterSvc terminologyVersionAdapterSvc();
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java
index f4ccd0a2bd7..d3e5019c89a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseDstu2Config.java
@@ -7,8 +7,10 @@ import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorDstu2;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryDstu2;
-import ca.uhn.fhir.jpa.term.HapiTerminologySvcDstu2;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
+import ca.uhn.fhir.jpa.term.TermReadSvcDstu2;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
+import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
+import ca.uhn.fhir.jpa.term.TermVersionAdapterSvcDstu2;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.validation.IValidatorModule;
@@ -134,8 +136,8 @@ public class BaseDstu2Config extends BaseConfig {
}
@Bean(autowire = Autowire.BY_TYPE)
- public IHapiTerminologySvc terminologyService() {
- return new HapiTerminologySvcDstu2();
+ public ITermReadSvc terminologyService() {
+ return new TermReadSvcDstu2();
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java
index 95499fcb207..6eb16b01356 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/BaseDstu3Config.java
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.config.dstu3;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.ParserOptions;
import ca.uhn.fhir.jpa.config.BaseConfig;
+import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
@@ -12,10 +13,12 @@ import ca.uhn.fhir.jpa.provider.GraphQLProvider;
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorDstu3;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryDstu3;
-import ca.uhn.fhir.jpa.term.HapiTerminologySvcDstu3;
-import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvcDstu3;
-import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
+import ca.uhn.fhir.jpa.term.TermReadSvcDstu3;
+import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
+import ca.uhn.fhir.jpa.term.TermVersionAdapterSvcDstu3;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvcDstu3;
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
+import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
import ca.uhn.fhir.validation.IValidatorModule;
@@ -26,7 +29,6 @@ import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.r5.utils.IResourceValidator;
-import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
@@ -55,13 +57,19 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
@Configuration
@EnableTransactionManagement
-public class BaseDstu3Config extends BaseConfig {
+public class BaseDstu3Config extends BaseConfigDstu3Plus {
@Override
public FhirContext fhirContext() {
return fhirContextDstu3();
}
+ @Bean
+ @Override
+ public ITermVersionAdapterSvc terminologyVersionAdapterSvc() {
+ return new TermVersionAdapterSvcDstu3();
+ }
+
@Bean
@Primary
public FhirContext fhirContextDstu3() {
@@ -109,10 +117,9 @@ public class BaseDstu3Config extends BaseConfig {
return new JpaValidationSupportChainDstu3();
}
- @Bean(name = "myJpaValidationSupportDstu3", autowire = Autowire.BY_NAME)
+ @Bean(name = "myJpaValidationSupportDstu3")
public ca.uhn.fhir.jpa.dao.dstu3.IJpaValidationSupportDstu3 jpaValidationSupportDstu3() {
- ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
- return retVal;
+ return new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
}
@Bean(name = "myResourceCountsCache")
@@ -122,13 +129,12 @@ public class BaseDstu3Config extends BaseConfig {
return retVal;
}
- @Bean(autowire = Autowire.BY_TYPE)
+ @Bean
public IFulltextSearchSvc searchDaoDstu3() {
- FulltextSearchSvcImpl searchDao = new FulltextSearchSvcImpl();
- return searchDao;
+ return new FulltextSearchSvcImpl();
}
- @Bean(autowire = Autowire.BY_TYPE)
+ @Bean
public SearchParamExtractorDstu3 searchParamExtractor() {
return new SearchParamExtractorDstu3();
}
@@ -138,10 +144,9 @@ public class BaseDstu3Config extends BaseConfig {
return new SearchParamRegistryDstu3();
}
- @Bean(name = "mySystemDaoDstu3", autowire = Autowire.BY_NAME)
+ @Bean(name = "mySystemDaoDstu3")
public IFhirSystemDao systemDaoDstu3() {
- ca.uhn.fhir.jpa.dao.dstu3.FhirSystemDaoDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.FhirSystemDaoDstu3();
- return retVal;
+ return new ca.uhn.fhir.jpa.dao.dstu3.FhirSystemDaoDstu3();
}
@Bean(name = "mySystemProviderDstu3")
@@ -152,18 +157,18 @@ public class BaseDstu3Config extends BaseConfig {
return retVal;
}
- @Bean(autowire = Autowire.BY_TYPE)
- public IHapiTerminologyLoaderSvc terminologyLoaderService() {
- return new TerminologyLoaderSvcImpl();
+ @Bean
+ public ITermLoaderSvc termLoaderService() {
+ return new TermLoaderSvcImpl();
}
- @Bean(autowire = Autowire.BY_TYPE)
- public IHapiTerminologySvcDstu3 terminologyService() {
- return new HapiTerminologySvcDstu3();
+ @Bean
+ public ITermReadSvcDstu3 terminologyService() {
+ return new TermReadSvcDstu3();
}
@Primary
- @Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainDstu3")
+ @Bean(name = "myJpaValidationSupportChainDstu3")
public IValidationSupport validationSupportChainDstu3() {
return new CachingValidationSupport(jpaValidationSupportChain());
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java
index 9c5cfd5d17b..98924125348 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/BaseR4Config.java
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.config.r4;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.ParserOptions;
import ca.uhn.fhir.jpa.config.BaseConfig;
+import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
@@ -12,10 +13,10 @@ import ca.uhn.fhir.jpa.provider.GraphQLProvider;
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorR4;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryR4;
-import ca.uhn.fhir.jpa.term.HapiTerminologySvcR4;
-import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvcR4;
-import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
+import ca.uhn.fhir.jpa.term.*;
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvcR4;
+import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainR4;
import ca.uhn.fhir.validation.IValidatorModule;
@@ -55,13 +56,19 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
@Configuration
@EnableTransactionManagement
-public class BaseR4Config extends BaseConfig {
+public class BaseR4Config extends BaseConfigDstu3Plus {
@Override
public FhirContext fhirContext() {
return fhirContextR4();
}
+ @Bean
+ @Override
+ public ITermVersionAdapterSvc terminologyVersionAdapterSvc() {
+ return new TermVersionAdapterSvcR4();
+ }
+
@Bean
@Primary
public FhirContext fhirContextR4() {
@@ -154,13 +161,13 @@ public class BaseR4Config extends BaseConfig {
}
@Bean(autowire = Autowire.BY_TYPE)
- public IHapiTerminologyLoaderSvc terminologyLoaderService() {
- return new TerminologyLoaderSvcImpl();
+ public ITermLoaderSvc termLoaderService() {
+ return new TermLoaderSvcImpl();
}
@Bean(autowire = Autowire.BY_TYPE)
- public IHapiTerminologySvcR4 terminologyService() {
- return new HapiTerminologySvcR4();
+ public ITermReadSvcR4 terminologyService() {
+ return new TermReadSvcR4();
}
@Primary
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java
index 3bce313f662..e8726d12b1a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/BaseR5Config.java
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.config.r5;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.ParserOptions;
import ca.uhn.fhir.jpa.config.BaseConfig;
+import ca.uhn.fhir.jpa.config.BaseConfigDstu3Plus;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
@@ -12,10 +13,10 @@ import ca.uhn.fhir.jpa.provider.GraphQLProvider;
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorR5;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryR5;
-import ca.uhn.fhir.jpa.term.HapiTerminologySvcR5;
-import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvcR5;
-import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
+import ca.uhn.fhir.jpa.term.*;
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvcR5;
+import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainR5;
import ca.uhn.fhir.validation.IValidatorModule;
@@ -55,13 +56,19 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
@Configuration
@EnableTransactionManagement
-public class BaseR5Config extends BaseConfig {
+public class BaseR5Config extends BaseConfigDstu3Plus {
@Override
public FhirContext fhirContext() {
return fhirContextR5();
}
+ @Bean
+ @Override
+ public ITermVersionAdapterSvc terminologyVersionAdapterSvc() {
+ return new TermVersionAdapterSvcR5();
+ }
+
@Bean
@Primary
public FhirContext fhirContextR5() {
@@ -154,13 +161,13 @@ public class BaseR5Config extends BaseConfig {
}
@Bean(autowire = Autowire.BY_TYPE)
- public IHapiTerminologyLoaderSvc terminologyLoaderService() {
- return new TerminologyLoaderSvcImpl();
+ public ITermLoaderSvc terminologyLoaderService() {
+ return new TermLoaderSvcImpl();
}
@Bean(autowire = Autowire.BY_TYPE)
- public IHapiTerminologySvcR5 terminologyService() {
- return new HapiTerminologySvcR5();
+ public ITermReadSvcR5 terminologyService() {
+ return new TermReadSvcR5();
}
@Primary
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index 3753f7a8411..f602b4905c8 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -20,13 +20,13 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
-import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
import ca.uhn.fhir.jpa.searchparam.extractor.LogicalReferenceHelper;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
+import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.AddRemoveCount;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.model.api.IResource;
@@ -53,7 +53,6 @@ import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetai
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.MetaUtil;
-import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.XmlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
@@ -62,8 +61,6 @@ import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.Validate;
-import org.hibernate.Session;
-import org.hibernate.internal.SessionImpl;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
import org.slf4j.Logger;
@@ -81,7 +78,6 @@ import javax.annotation.PostConstruct;
import javax.persistence.*;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;
@@ -139,7 +135,7 @@ public abstract class BaseHapiFhirDao implements IDao,
@Autowired
protected ISearchParamRegistry mySerarchParamRegistry;
@Autowired
- protected IHapiTerminologySvc myTerminologySvc;
+ protected ITermReadSvc myTerminologySvc;
@Autowired
protected IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
@@ -161,8 +157,6 @@ public abstract class BaseHapiFhirDao implements IDao,
@Autowired
private ISearchCacheSvc mySearchCacheSvc;
@Autowired
- private ISearchResultCacheSvc mySearchResultCacheSvc;
- @Autowired
private ISearchParamPresenceSvc mySearchParamPresenceSvc;
@Autowired
private DaoRegistry myDaoRegistry;
@@ -192,20 +186,18 @@ public abstract class BaseHapiFhirDao implements IDao,
* none was created, returns null.
*/
protected ForcedId createForcedIdIfNeeded(ResourceTable theEntity, IIdType theId, boolean theCreateForPureNumericIds) {
+ ForcedId retVal = null;
if (theId.isEmpty() == false && theId.hasIdPart() && theEntity.getForcedId() == null) {
- if (!theCreateForPureNumericIds && IdHelperService.isValidPid(theId)) {
- return null;
+ if (theCreateForPureNumericIds || !IdHelperService.isValidPid(theId)) {
+ retVal = new ForcedId();
+ retVal.setResourceType(theEntity.getResourceType());
+ retVal.setForcedId(theId.getIdPart());
+ retVal.setResource(theEntity);
+ theEntity.setForcedId(retVal);
}
-
- ForcedId fid = new ForcedId();
- fid.setResourceType(theEntity.getResourceType());
- fid.setForcedId(theId.getIdPart());
- fid.setResource(theEntity);
- theEntity.setForcedId(fid);
- return fid;
}
- return null;
+ return retVal;
}
private void extractTagsHapi(IResource theResource, ResourceTable theEntity, Set allDefs) {
@@ -285,39 +277,6 @@ public abstract class BaseHapiFhirDao implements IDao,
}
- private void findMatchingTagIds(RequestDetails theRequest, String theResourceName, IIdType theResourceId, Set<Long> tagIds, Class<? extends BaseTag> entityClass) {
- {
- CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
- CriteriaQuery<Tuple> cq = builder.createTupleQuery();
- Root<? extends BaseTag> from = cq.from(entityClass);
- cq.multiselect(from.get("myTagId").as(Long.class)).distinct(true);
-
- if (theResourceName != null) {
- Predicate typePredicate = builder.equal(from.get("myResourceType"), theResourceName);
- if (theResourceId != null) {
- cq.where(typePredicate, builder.equal(from.get("myResourceId"), myIdHelperService.translateForcedIdToPid(theResourceName, theResourceId.getIdPart(), theRequest)));
- } else {
- cq.where(typePredicate);
- }
- }
-
- TypedQuery<Tuple> query = myEntityManager.createQuery(cq);
- for (Tuple next : query.getResultList()) {
- tagIds.add(next.get(0, Long.class));
- }
- }
- }
-
- protected void flushJpaSession() {
- SessionImpl session = (SessionImpl) myEntityManager.unwrap(Session.class);
- int insertionCount = session.getActionQueue().numberOfInsertions();
- int updateCount = session.getActionQueue().numberOfUpdates();
-
- StopWatch sw = new StopWatch();
- myEntityManager.flush();
- ourLog.debug("Session flush took {}ms for {} inserts and {} updates", sw.getMillis(), insertionCount, updateCount);
- }
-
private Set getAllTagDefinitions(ResourceTable theEntity) {
HashSet retVal = Sets.newHashSet();
if (theEntity.isHasTags()) {
@@ -358,7 +317,6 @@ public abstract class BaseHapiFhirDao implements IDao,
}
}
- @SuppressWarnings("unchecked")
public IFhirResourceDao getDao(Class theType) {
return myDaoRegistry.getResourceDaoOrNull(theType);
}
@@ -446,6 +404,7 @@ public abstract class BaseHapiFhirDao implements IDao,
newVersion = Long.toString(newVersionLong);
}
+ assert theResourceId != null;
IIdType newId = theResourceId.withVersion(newVersion);
theResource.getIdElement().setValue(newId.getValue());
theSavedEntity.setVersion(newVersionLong);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
index 6c174bc1dfe..363bdf005b0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java
@@ -39,7 +39,7 @@ import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.term.VersionIndependentConcept;
import ca.uhn.fhir.jpa.util.*;
import ca.uhn.fhir.model.api.*;
@@ -64,7 +64,6 @@ import ca.uhn.fhir.util.UrlUtil;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
-import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
@@ -135,7 +134,7 @@ public class SearchBuilder implements ISearchBuilder {
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
- private IHapiTerminologySvc myTerminologySvc;
+ private ITermReadSvc myTerminologySvc;
@Autowired
private MatchUrlService myMatchUrlService;
private List myAlsoIncludePids;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoCodeSystemDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoCodeSystemDstu3.java
index 6759858c732..21a4e28a615 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoCodeSystemDstu3.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoCodeSystemDstu3.java
@@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.util.LogicUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.TokenParam;
@@ -41,6 +42,7 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;
+import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@@ -54,21 +56,22 @@ public class FhirResourceDaoCodeSystemDstu3 extends FhirResourceDaoDstu3 findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem, RequestDetails theRequest) {
- List<IIdType> valueSetIds;
Set<Long> ids = searchForIds(new SearchParameterMap(CodeSystem.SP_CODE, new TokenParam(theSystem, theCode)), theRequest );
- valueSetIds = new ArrayList<>();
+ List<IIdType> valueSetIds = new ArrayList<>();
for (Long next : ids) {
valueSetIds.add(new IdType("CodeSystem", next));
}
return valueSetIds;
}
+ @Nonnull
@Override
public IContextValidationSupport.LookupCodeResult lookupCode(IPrimitiveType<String> theCode, IPrimitiveType<String> theSystem, Coding theCoding, RequestDetails theRequestDetails) {
boolean haveCoding = theCoding != null && isNotBlank(theCoding.getSystem()) && isNotBlank(theCoding.getCode());
@@ -119,7 +122,7 @@ public class FhirResourceDaoCodeSystemDstu3 extends FhirResourceDaoDstu3 implements IFhirResourceDaoConceptMap {
@Autowired
- private IHapiTerminologySvc myHapiTerminologySvc;
+ private ITermReadSvc myHapiTerminologySvc;
@Override
public TranslationResult translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java
index 891c243e76a..2a090d57a5b 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java
@@ -24,7 +24,7 @@ import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.LogicUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
@@ -62,7 +62,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoValueSetDstu3.class);
@Autowired
- private IHapiTerminologySvc myHapiTerminologySvc;
+ private ITermReadSvc myHapiTerminologySvc;
@Autowired
private DefaultProfileValidationSupport myDefaultProfileValidationSupport;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoCodeSystemR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoCodeSystemR4.java
index bc9768ca162..46d38a2fd06 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoCodeSystemR4.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoCodeSystemR4.java
@@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.util.LogicUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.TokenParam;
@@ -56,6 +57,8 @@ public class FhirResourceDaoCodeSystemR4 extends FhirResourceDaoR4 i
private ITermCodeSystemDao myCsDao;
@Autowired
private ValidationSupportChain myValidationSupport;
+ @Autowired
+ protected ITermCodeSystemStorageSvc myTerminologyCodeSystemStorageSvc;
@Override
public List<IIdType> findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem, RequestDetails theRequest) {
@@ -122,7 +125,7 @@ public class FhirResourceDaoCodeSystemR4 extends FhirResourceDaoR4 i
if (isNotBlank(codeSystemUrl)) {
TermCodeSystem persCs = myCsDao.findByCodeSystemUri(codeSystemUrl);
if (persCs != null) {
- myTerminologySvc.deleteCodeSystem(persCs);
+ myTerminologyCodeSystemStorageSvc.deleteCodeSystem(persCs);
}
}
}
@@ -135,7 +138,7 @@ public class FhirResourceDaoCodeSystemR4 extends FhirResourceDaoR4 i
CodeSystem cs = (CodeSystem) theResource;
addPidToResource(theEntity, theResource);
- myTerminologySvc.storeNewCodeSystemVersionIfNeeded(cs, theEntity);
+ myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(cs, theEntity);
return retVal;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoConceptMapR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoConceptMapR4.java
index b553e42baa6..d715b273018 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoConceptMapR4.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoConceptMapR4.java
@@ -24,7 +24,7 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.term.TranslationMatch;
import ca.uhn.fhir.jpa.term.TranslationRequest;
import ca.uhn.fhir.jpa.term.TranslationResult;
@@ -38,11 +38,9 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import static org.apache.commons.lang3.StringUtils.isNotBlank;
-
public class FhirResourceDaoConceptMapR4 extends FhirResourceDaoR4 implements IFhirResourceDaoConceptMap {
@Autowired
- private IHapiTerminologySvc myHapiTerminologySvc;
+ private ITermReadSvc myHapiTerminologySvc;
@Override
public TranslationResult translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java
index 60f83003197..d94509e6bb7 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java
@@ -24,7 +24,7 @@ import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.LogicUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@@ -56,7 +56,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 implements IFhirResourceDaoValueSet {
@Autowired
- private IHapiTerminologySvc myHapiTerminologySvc;
+ private ITermReadSvc myHapiTerminologySvc;
@Autowired
private DefaultProfileValidationSupport myDefaultProfileValidationSupport;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoCodeSystemR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoCodeSystemR5.java
index 515355bc044..fbdea21197e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoCodeSystemR5.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoCodeSystemR5.java
@@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.util.LogicUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.TokenParam;
@@ -56,6 +57,8 @@ public class FhirResourceDaoCodeSystemR5 extends FhirResourceDaoR5 i
private ITermCodeSystemDao myCsDao;
@Autowired
private ValidationSupportChain myValidationSupport;
+ @Autowired
+ protected ITermCodeSystemStorageSvc myTerminologyCodeSystemStorageSvc;
@Override
public List<IIdType> findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem, RequestDetails theRequest) {
@@ -122,7 +125,7 @@ public class FhirResourceDaoCodeSystemR5 extends FhirResourceDaoR5 i
if (isNotBlank(codeSystemUrl)) {
TermCodeSystem persCs = myCsDao.findByCodeSystemUri(codeSystemUrl);
if (persCs != null) {
- myTerminologySvc.deleteCodeSystem(persCs);
+ myTerminologyCodeSystemStorageSvc.deleteCodeSystem(persCs);
}
}
}
@@ -135,7 +138,7 @@ public class FhirResourceDaoCodeSystemR5 extends FhirResourceDaoR5 i
CodeSystem cs = (CodeSystem) theResource;
addPidToResource(theEntity, theResource);
- myTerminologySvc.storeNewCodeSystemVersionIfNeeded(org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(cs), theEntity);
+ myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem(cs), theEntity);
return retVal;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoConceptMapR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoConceptMapR5.java
index dab3f9c764d..ea7b2525463 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoConceptMapR5.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoConceptMapR5.java
@@ -24,7 +24,7 @@ import ca.uhn.fhir.jpa.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.term.TranslationMatch;
import ca.uhn.fhir.jpa.term.TranslationRequest;
import ca.uhn.fhir.jpa.term.TranslationResult;
@@ -41,7 +41,7 @@ import java.util.Set;
public class FhirResourceDaoConceptMapR5 extends FhirResourceDaoR5 implements IFhirResourceDaoConceptMap {
@Autowired
- private IHapiTerminologySvc myHapiTerminologySvc;
+ private ITermReadSvc myHapiTerminologySvc;
@Override
public TranslationResult translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java
index a5bb4fcbf0f..ce1cbdcc515 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java
@@ -24,7 +24,7 @@ import ca.uhn.fhir.context.support.IContextValidationSupport;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.LogicUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@@ -56,7 +56,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 implements IFhirResourceDaoValueSet {
@Autowired
- private IHapiTerminologySvc myHapiTerminologySvc;
+ private ITermReadSvc myHapiTerminologySvc;
@Autowired
private DefaultProfileValidationSupport myDefaultProfileValidationSupport;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystem.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystem.java
index 2c9ec0bd4ce..a15ca22fd5c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystem.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystem.java
@@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.entity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.util.ValidateUtil;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
@@ -39,17 +41,17 @@ import static org.apache.commons.lang3.StringUtils.length;
@Entity()
//@formatter:on
public class TermCodeSystem implements Serializable {
- private static final long serialVersionUID = 1L;
-
- private static final int MAX_NAME_LENGTH = 200;
public static final int MAX_URL_LENGTH = 200;
-
+ private static final long serialVersionUID = 1L;
+ private static final int MAX_NAME_LENGTH = 200;
@Column(name = "CODE_SYSTEM_URI", nullable = false, length = MAX_URL_LENGTH)
private String myCodeSystemUri;
@OneToOne()
@JoinColumn(name = "CURRENT_VERSION_PID", referencedColumnName = "PID", nullable = true, foreignKey = @ForeignKey(name = "FK_TRMCODESYSTEM_CURVER"))
private TermCodeSystemVersion myCurrentVersion;
+ @Column(name = "CURRENT_VERSION_PID", nullable = true, insertable = false, updatable = false)
+ private Long myCurrentVersionPid;
@Id()
@SequenceGenerator(name = "SEQ_CODESYSTEM_PID", sequenceName = "SEQ_CODESYSTEM_PID")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CODESYSTEM_PID")
@@ -70,12 +72,32 @@ public class TermCodeSystem implements Serializable {
super();
}
- public String getCodeSystemUri() {
- return myCodeSystemUri;
+ @Override
+ public boolean equals(Object theO) {
+ if (this == theO) {
+ return true;
+ }
+
+ if (theO == null || getClass() != theO.getClass()) {
+ return false;
+ }
+
+ TermCodeSystem that = (TermCodeSystem) theO;
+
+ EqualsBuilder b = new EqualsBuilder();
+ b.append(myCodeSystemUri, that.myCodeSystemUri);
+ return b.isEquals();
}
- public String getName() {
- return myName;
+ @Override
+ public int hashCode() {
+ HashCodeBuilder b = new HashCodeBuilder(17, 37);
+ b.append(myCodeSystemUri);
+ return b.toHashCode();
+ }
+
+ public String getCodeSystemUri() {
+ return myCodeSystemUri;
}
public TermCodeSystem setCodeSystemUri(@Nonnull String theCodeSystemUri) {
@@ -86,6 +108,15 @@ public class TermCodeSystem implements Serializable {
return this;
}
+ public String getName() {
+ return myName;
+ }
+
+ public TermCodeSystem setName(String theName) {
+ myName = left(theName, MAX_NAME_LENGTH);
+ return this;
+ }
+
public TermCodeSystemVersion getCurrentVersion() {
return myCurrentVersion;
}
@@ -103,11 +134,6 @@ public class TermCodeSystem implements Serializable {
return myResource;
}
- public TermCodeSystem setName(String theName) {
- myName = left(theName, MAX_NAME_LENGTH);
- return this;
- }
-
public TermCodeSystem setResource(ResourceTable theResource) {
myResource = theResource;
return this;
@@ -115,12 +141,13 @@ public class TermCodeSystem implements Serializable {
@Override
public String toString() {
- return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
- .append("codeSystemUri", myCodeSystemUri)
- .append("currentVersion", myCurrentVersion)
- .append("pid", myPid)
- .append("resourcePid", myResourcePid)
- .append("name", myName)
+ ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
+ b.append("pid", myPid);
+ b.append("codeSystemUri", myCodeSystemUri);
+ b.append("currentVersionPid", myCurrentVersionPid);
+ b.append("resourcePid", myResourcePid);
+ b.append("name", myName);
+ return b
.toString();
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java
index 25a74c21dc1..a0de29a76a8 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java
@@ -21,8 +21,11 @@ package ca.uhn.fhir.jpa.entity;
*/
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.ValidateUtil;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
import javax.persistence.*;
import java.io.Serializable;
@@ -36,10 +39,8 @@ import static org.apache.commons.lang3.StringUtils.length;
)
@Entity()
public class TermCodeSystemVersion implements Serializable {
- private static final long serialVersionUID = 1L;
-
public static final int MAX_VERSION_LENGTH = 200;
-
+ private static final long serialVersionUID = 1L;
@OneToMany(fetch = FetchType.LAZY, mappedBy = "myCodeSystem")
private Collection<TermConcept> myConcepts;
@@ -84,34 +85,6 @@ public class TermCodeSystemVersion implements Serializable {
super();
}
- @CoverageIgnore
- @Override
- public boolean equals(Object obj) {
- if (this == obj) {
- return true;
- }
- if (obj == null) {
- return false;
- }
- if (!(obj instanceof TermCodeSystemVersion)) {
- return false;
- }
- TermCodeSystemVersion other = (TermCodeSystemVersion) obj;
- if ((myResource.getId() == null) != (other.myResource.getId() == null)) {
- return false;
- } else if (!myResource.getId().equals(other.myResource.getId())) {
- return false;
- }
-
- if (myCodeSystemVersionId == null) {
- if (other.myCodeSystemVersionId != null) {
- return false;
- }
- } else if (!myCodeSystemVersionId.equals(other.myCodeSystemVersionId)) {
- return false;
- }
- return true;
- }
public TermCodeSystem getCodeSystem() {
return myCodeSystem;
@@ -154,13 +127,30 @@ public class TermCodeSystemVersion implements Serializable {
return this;
}
+ @Override
+ public boolean equals(Object theO) {
+ if (this == theO) {
+ return true;
+ }
+
+ if (theO == null || getClass() != theO.getClass()) {
+ return false;
+ }
+
+ TermCodeSystemVersion that = (TermCodeSystemVersion) theO;
+
+ return new EqualsBuilder()
+ .append(myCodeSystemVersionId, that.myCodeSystemVersionId)
+ .append(myCodeSystemPid, that.myCodeSystemPid)
+ .isEquals();
+ }
+
@Override
public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((myResource.getId() == null) ? 0 : myResource.getId().hashCode());
- result = prime * result + ((myCodeSystemVersionId == null) ? 0 : myCodeSystemVersionId.hashCode());
- return result;
+ HashCodeBuilder b = new HashCodeBuilder(17, 37);
+ b.append(myCodeSystemVersionId);
+ b.append(myCodeSystemPid);
+ return b.toHashCode();
}
public String getCodeSystemDisplayName() {
@@ -180,4 +170,19 @@ public class TermCodeSystemVersion implements Serializable {
getConcepts().add(concept);
return concept;
}
+
+ @Override
+ public String toString() {
+ ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
+ b.append("pid", myId);
+ b.append("codeSystemResourcePid", myResourcePid);
+ b.append("codeSystemPid", myCodeSystemPid);
+ b.append("codeSystemVersionId", myCodeSystemVersionId);
+ return b.toString();
+ }
+
+ TermCodeSystemVersion setCodeSystemPidForUnitTest(long theCodeSystemPid) {
+ myCodeSystemPid = theCodeSystemPid;
+ return this;
+ }
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java
index 1ee569d8867..fae709aa307 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java
@@ -37,6 +37,7 @@ import javax.persistence.Index;
import javax.persistence.*;
import java.io.Serializable;
import java.util.*;
+import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.left;
import static org.apache.commons.lang3.StringUtils.length;
@@ -108,6 +109,13 @@ public class TermConcept implements Serializable {
setCode(theCode);
}
+ public TermConcept addChild(RelationshipTypeEnum theRelationshipType) {
+ TermConcept child = new TermConcept();
+ child.setCodeSystemVersion(myCodeSystem);
+ addChild(child, theRelationshipType);
+ return child;
+ }
+
public TermConceptParentChildLink addChild(TermConcept theChild, RelationshipTypeEnum theRelationshipType) {
Validate.notNull(theRelationshipType, "theRelationshipType must not be null");
TermConceptParentChildLink link = new TermConceptParentChildLink();
@@ -200,7 +208,7 @@ public class TermConcept implements Serializable {
public TermConcept setCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
myCodeSystem = theCodeSystemVersion;
- if (theCodeSystemVersion.getPid() != null) {
+ if (theCodeSystemVersion != null && theCodeSystemVersion.getPid() != null) {
myCodeSystemVersionPid = theCodeSystemVersion.getPid();
}
return this;
@@ -365,10 +373,13 @@ public class TermConcept implements Serializable {
@Override
public String toString() {
- return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
- .append("code", myCode)
- .append("display", myDisplay)
- .build();
+ ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
+ b.append("code", myCode);
+ b.append("display", myDisplay);
+ if (mySequence != null) {
+ b.append("sequence", mySequence);
+ }
+ return b.build();
}
public List toValidationProperties() {
@@ -387,4 +398,13 @@ public class TermConcept implements Serializable {
}
return retVal;
}
+
+ /**
+ * Returns a view of {@link #getChildren()} but containing the actual child codes
+ */
+ public List<TermConcept> getChildCodes() {
+ return getChildren().stream().map(t -> t.getChild()).collect(Collectors.toList());
+ }
+
+
}
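The new addChild(RelationshipTypeEnum) overload and getChildCodes() helper make it easier to build and walk a concept hierarchy without handling the parent/child link entities directly. A minimal usage sketch (hypothetical, using only methods visible in this patch):

// Hypothetical usage of the helpers added above; not part of the patch.
TermConcept parent = new TermConcept();
parent.setCode("parent");
TermConcept child = parent.addChild(TermConceptParentChildLink.RelationshipTypeEnum.ISA);
child.setCode("child");
List<TermConcept> children = parent.getChildCodes();   // [child] - the concepts themselves, not the link objects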
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java
index c7d38eed131..87ebf0c05b8 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java
@@ -20,11 +20,16 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Fields;
+
import javax.persistence.*;
import java.io.Serializable;
@Entity
-@Table(name = "TRM_CONCEPT_PC_LINK")
+@Table(name = "TRM_CONCEPT_PC_LINK", indexes = {
+ @Index(name = "IDX_TRMCONCPCLNK_CSV", columnList = "CODESYSTEM_PID")
+})
public class TermConceptParentChildLink implements Serializable {
private static final long serialVersionUID = 1L;
@@ -39,6 +44,10 @@ public class TermConceptParentChildLink implements Serializable {
@JoinColumn(name = "CODESYSTEM_PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_CS"))
private TermCodeSystemVersion myCodeSystem;
+ @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false, nullable = false)
+ @Fields({@Field(name = "myCodeSystemVersionPid")})
+ private long myCodeSystemVersionPid;
+
@ManyToOne(cascade = {})
@JoinColumn(name = "PARENT_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_PARENT"))
private TermConcept myParent;
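The new CODESYSTEM_PID mapping above duplicates the join column as a plain scalar, marked insertable = false / updatable = false so Hibernate does not see two writable mappings of the same column; the Hibernate Search @Field makes the pid filterable from the index, and IDX_TRMCONCPCLNK_CSV supports bulk operations by code system version. A self-contained sketch of this dual-mapping pattern on hypothetical entities (illustrative only, not HAPI code):

// Sketch: map the same FK column twice - once as the navigable association,
// once as a read-only scalar mirror of the pid value.
@Entity
class ExampleParent {
   @Id
   private Long myPid;
}

@Entity
class ExampleLink {
   @Id
   private Long myPid;

   @ManyToOne
   @JoinColumn(name = "PARENT_PID", nullable = false)
   private ExampleParent myParent;                      // writable association

   @Column(name = "PARENT_PID", insertable = false, updatable = false, nullable = false)
   private long myParentPid;                            // read-only mirror of the same FK column
}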
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java
index ddb78d7c6b9..8665fe744d8 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java
@@ -22,9 +22,8 @@ package ca.uhn.fhir.jpa.provider;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
-import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
-import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc.UploadStatistics;
-import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
+import ca.uhn.fhir.jpa.term.UploadStatistics;
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
@@ -34,140 +33,46 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.AttachmentUtil;
import ca.uhn.fhir.util.ParametersUtil;
import ca.uhn.fhir.util.ValidateUtil;
-import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.instance.model.api.IBaseParameters;
-import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.ICompositeType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
-import org.hl7.fhir.r4.model.CodeSystem;
import org.springframework.beans.factory.annotation.Autowired;
+import javax.annotation.Nonnull;
import javax.servlet.http.HttpServletRequest;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
-import java.util.concurrent.atomic.AtomicInteger;
import static org.apache.commons.lang3.StringUtils.*;
public class TerminologyUploaderProvider extends BaseJpaProvider {
- public static final String CONCEPT_COUNT = "conceptCount";
- public static final String TARGET = "target";
- public static final String PARENT_CODE = "parentCode";
- public static final String VALUE = "value";
+ public static final String PARAM_FILE = "file";
+ public static final String PARAM_SYSTEM = "system";
+ private static final String RESP_PARAM_CONCEPT_COUNT = "conceptCount";
+ private static final String RESP_PARAM_TARGET = "target";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProvider.class);
- private static final String PACKAGE = "package";
+ private static final String RESP_PARAM_SUCCESS = "success";
@Autowired
private FhirContext myCtx;
@Autowired
- private IHapiTerminologyLoaderSvc myTerminologyLoaderSvc;
- @Autowired
- private IHapiTerminologySvc myTerminologySvc;
+ private ITermLoaderSvc myTerminologyLoaderSvc;
/**
* Constructor
*/
public TerminologyUploaderProvider() {
- this(null, null, null);
+ this(null, null);
}
/**
* Constructor
*/
- public TerminologyUploaderProvider(FhirContext theContext, IHapiTerminologyLoaderSvc theTerminologyLoaderSvc, IHapiTerminologySvc theTerminologySvc) {
+ public TerminologyUploaderProvider(FhirContext theContext, ITermLoaderSvc theTerminologyLoaderSvc) {
myCtx = theContext;
myTerminologyLoaderSvc = theTerminologyLoaderSvc;
- myTerminologySvc = theTerminologySvc;
- }
-
-
- /**
- *
- * $apply-codesystem-delta-add
- *
- */
- @Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, idempotent = false, returnParameters = {
- })
- public IBaseParameters applyCodeSystemDeltaAdd(
- HttpServletRequest theServletRequest,
- @OperationParam(name = PARENT_CODE, min = 0, max = 1) IPrimitiveType<String> theParentCode,
- @OperationParam(name = VALUE, min = 0, max = 1) IBaseResource theValue,
- RequestDetails theRequestDetails
- ) {
-
- startRequest(theServletRequest);
- try {
-
- CodeSystem value;
- if (theValue instanceof CodeSystem) {
- value = (CodeSystem) theValue;
- } else if (theValue instanceof org.hl7.fhir.dstu3.model.CodeSystem) {
- value = VersionConvertor_30_40.convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theValue);
- } else if (theValue instanceof org.hl7.fhir.r5.model.CodeSystem) {
- value = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theValue);
- } else {
- throw new InvalidRequestException("Value must be present and be a CodeSystem");
- }
-
- String system = value.getUrl();
- String parentCode = theParentCode != null ? theParentCode.getValue() : null;
-
- AtomicInteger counter = myTerminologySvc.applyDeltaCodesystemsAdd(system, parentCode, value);
-
- IBaseParameters retVal = ParametersUtil.newInstance(myCtx);
- ParametersUtil.addParameterToParametersBoolean(myCtx, retVal, "success", true);
- ParametersUtil.addParameterToParametersInteger(myCtx, retVal, "addedConcepts", counter.get());
- return retVal;
-
- } finally {
- endRequest(theServletRequest);
- }
-
- }
-
-
- /**
- *
- * $apply-codesystem-delta-remove
- *
- */
- @Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, idempotent = false, returnParameters = {
- })
- public IBaseParameters applyCodeSystemDeltaRemove(
- HttpServletRequest theServletRequest,
- @OperationParam(name = VALUE, min = 1, max = 1) IBaseResource theValue,
- RequestDetails theRequestDetails
- ) {
-
- startRequest(theServletRequest);
- try {
-
- CodeSystem value;
- if (theValue instanceof CodeSystem) {
- value = (CodeSystem) theValue;
- } else if (theValue instanceof org.hl7.fhir.dstu3.model.CodeSystem) {
- value = VersionConvertor_30_40.convertCodeSystem((org.hl7.fhir.dstu3.model.CodeSystem) theValue);
- } else if (theValue instanceof org.hl7.fhir.r5.model.CodeSystem) {
- value = org.hl7.fhir.convertors.conv40_50.CodeSystem.convertCodeSystem((org.hl7.fhir.r5.model.CodeSystem) theValue);
- } else {
- throw new InvalidRequestException("Value must be present and be a CodeSystem");
- }
-
- String system = value.getUrl();
-
- AtomicInteger counter = myTerminologySvc.applyDeltaCodesystemsRemove(system, value);
-
- IBaseParameters retVal = ParametersUtil.newInstance(myCtx);
- ParametersUtil.addParameterToParametersBoolean(myCtx, retVal, "success", true);
- ParametersUtil.addParameterToParametersInteger(myCtx, retVal, "removedConcepts", counter.get());
- return retVal;
-
- } finally {
- endRequest(theServletRequest);
- }
-
}
/**
@@ -178,28 +83,31 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
@Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM, idempotent = false, returnParameters = {
// @OperationParam(name = "conceptCount", type = IntegerType.class, min = 1)
})
- public IBaseParameters uploadExternalCodeSystem(
+ public IBaseParameters uploadSnapshot(
HttpServletRequest theServletRequest,
- @OperationParam(name = "url", min = 1, typeName = "uri") IPrimitiveType theCodeSystemUrl,
- @OperationParam(name = "contentMode", min = 0, typeName = "code") IPrimitiveType theContentMode,
+ @OperationParam(name = PARAM_SYSTEM, min = 1, typeName = "uri") IPrimitiveType theCodeSystemUrl,
@OperationParam(name = "localfile", min = 1, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theLocalFile,
- @OperationParam(name = PACKAGE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List thePackage,
+ @OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List theFiles,
RequestDetails theRequestDetails
) {
startRequest(theServletRequest);
+ if (theCodeSystemUrl == null || isBlank(theCodeSystemUrl.getValueAsString())) {
+ throw new InvalidRequestException("Missing mandatory parameter: " + PARAM_SYSTEM);
+ }
+
if (theLocalFile == null || theLocalFile.size() == 0) {
- if (thePackage == null || thePackage.size() == 0) {
+ if (theFiles == null || theFiles.size() == 0) {
throw new InvalidRequestException("No 'localfile' or 'package' parameter, or package had no data");
}
- for (ICompositeType next : thePackage) {
+ for (ICompositeType next : theFiles) {
ValidateUtil.isTrueOrThrowInvalidRequest(myCtx.getElementDefinition(next.getClass()).getName().equals("Attachment"), "Package must be of type Attachment");
}
}
try {
- List<IHapiTerminologyLoaderSvc.FileDescriptor> localFiles = new ArrayList<>();
+ List<ITermLoaderSvc.FileDescriptor> localFiles = new ArrayList<>();
if (theLocalFile != null && theLocalFile.size() > 0) {
for (IPrimitiveType<String> nextLocalFile : theLocalFile) {
if (isNotBlank(nextLocalFile.getValue())) {
@@ -208,7 +116,7 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
if (!nextFile.exists() || !nextFile.isFile()) {
throw new InvalidRequestException("Unknown file: " + nextFile.getName());
}
- localFiles.add(new IHapiTerminologyLoaderSvc.FileDescriptor() {
+ localFiles.add(new ITermLoaderSvc.FileDescriptor() {
@Override
public String getFilename() {
return nextFile.getAbsolutePath();
@@ -227,15 +135,15 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
}
}
- if (thePackage != null) {
- for (ICompositeType nextPackage : thePackage) {
+ if (theFiles != null) {
+ for (ICompositeType nextPackage : theFiles) {
final String url = AttachmentUtil.getOrCreateUrl(myCtx, nextPackage).getValueAsString();
if (isBlank(url)) {
throw new UnprocessableEntityException("Package is missing mandatory url element");
}
- localFiles.add(new IHapiTerminologyLoaderSvc.FileDescriptor() {
+ localFiles.add(new ITermLoaderSvc.FileDescriptor() {
@Override
public String getFilename() {
return url;
@@ -250,33 +158,29 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
}
}
- String codeSystemUrl = theCodeSystemUrl != null ? theCodeSystemUrl.getValue() : null;
- codeSystemUrl = defaultString(codeSystemUrl);
+ String codeSystemUrl = theCodeSystemUrl.getValue();
+ codeSystemUrl = trim(codeSystemUrl);
- String contentMode = theContentMode != null ? theContentMode.getValue() : null;
UploadStatistics stats;
- if ("custom".equals(contentMode)) {
- stats = myTerminologyLoaderSvc.loadCustom(codeSystemUrl, localFiles, theRequestDetails);
- } else {
- switch (codeSystemUrl) {
- case IHapiTerminologyLoaderSvc.SCT_URI:
- stats = myTerminologyLoaderSvc.loadSnomedCt(localFiles, theRequestDetails);
- break;
- case IHapiTerminologyLoaderSvc.LOINC_URI:
- stats = myTerminologyLoaderSvc.loadLoinc(localFiles, theRequestDetails);
- break;
- case IHapiTerminologyLoaderSvc.IMGTHLA_URI:
- stats = myTerminologyLoaderSvc.loadImgthla(localFiles, theRequestDetails);
- break;
- default:
- throw new InvalidRequestException("Unknown URL: " + codeSystemUrl);
- }
+ switch (codeSystemUrl) {
+ case ITermLoaderSvc.SCT_URI:
+ stats = myTerminologyLoaderSvc.loadSnomedCt(localFiles, theRequestDetails);
+ break;
+ case ITermLoaderSvc.LOINC_URI:
+ stats = myTerminologyLoaderSvc.loadLoinc(localFiles, theRequestDetails);
+ break;
+ case ITermLoaderSvc.IMGTHLA_URI:
+ stats = myTerminologyLoaderSvc.loadImgthla(localFiles, theRequestDetails);
+ break;
+ default:
+ stats = myTerminologyLoaderSvc.loadCustom(codeSystemUrl, localFiles, theRequestDetails);
+ break;
}
IBaseParameters retVal = ParametersUtil.newInstance(myCtx);
- ParametersUtil.addParameterToParametersBoolean(myCtx, retVal, "success", true);
- ParametersUtil.addParameterToParametersInteger(myCtx, retVal, CONCEPT_COUNT, stats.getConceptCount());
- ParametersUtil.addParameterToParametersReference(myCtx, retVal, TARGET, stats.getTarget().getValue());
+ ParametersUtil.addParameterToParametersBoolean(myCtx, retVal, RESP_PARAM_SUCCESS, true);
+ ParametersUtil.addParameterToParametersInteger(myCtx, retVal, RESP_PARAM_CONCEPT_COUNT, stats.getUpdatedConceptCount());
+ ParametersUtil.addParameterToParametersReference(myCtx, retVal, RESP_PARAM_TARGET, stats.getTarget().getValue());
return retVal;
} finally {
@@ -284,5 +188,100 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
}
}
+ /**
+ *
+ * $apply-codesystem-delta-add
+ *
+ */
+ @Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, idempotent = false, returnParameters = {
+ })
+ public IBaseParameters uploadDeltaAdd(
+ HttpServletRequest theServletRequest,
+ @OperationParam(name = PARAM_SYSTEM, min = 1, max = 1, typeName = "uri") IPrimitiveType<String> theSystem,
+ @OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> theFiles,
+ RequestDetails theRequestDetails
+ ) {
+
+ startRequest(theServletRequest);
+ try {
+ validateHaveSystem(theSystem);
+ validateHaveFiles(theFiles);
+
+ List<ITermLoaderSvc.FileDescriptor> files = convertAttachmentsToFileDescriptors(theFiles);
+ UploadStatistics outcome = myTerminologyLoaderSvc.loadDeltaAdd(theSystem.getValue(), files, theRequestDetails);
+ return toDeltaResponse(outcome);
+ } finally {
+ endRequest(theServletRequest);
+ }
+
+ }
+
+
+ /**
+ *
+ * $apply-codesystem-delta-remove
+ *
+ */
+ @Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, idempotent = false, returnParameters = {
+ })
+ public IBaseParameters uploadDeltaRemove(
+ HttpServletRequest theServletRequest,
+ @OperationParam(name = PARAM_SYSTEM, min = 1, max = 1, typeName = "uri") IPrimitiveType<String> theSystem,
+ @OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> theFiles,
+ RequestDetails theRequestDetails
+ ) {
+
+ startRequest(theServletRequest);
+ try {
+ validateHaveSystem(theSystem);
+ validateHaveFiles(theFiles);
+
+ List<ITermLoaderSvc.FileDescriptor> files = convertAttachmentsToFileDescriptors(theFiles);
+ UploadStatistics outcome = myTerminologyLoaderSvc.loadDeltaRemove(theSystem.getValue(), files, theRequestDetails);
+ return toDeltaResponse(outcome);
+ } finally {
+ endRequest(theServletRequest);
+ }
+
+ }
+
+ private void validateHaveSystem(IPrimitiveType<String> theSystem) {
+ if (theSystem == null || isBlank(theSystem.getValueAsString())) {
+ throw new InvalidRequestException("Missing mandatory parameter: " + PARAM_SYSTEM);
+ }
+ }
+
+ private void validateHaveFiles(List<ICompositeType> theFiles) {
+ if (theFiles != null) {
+ for (ICompositeType nextFile : theFiles) {
+ if (!nextFile.isEmpty()) {
+ return;
+ }
+ }
+ }
+ throw new InvalidRequestException("Missing mandatory parameter: " + PARAM_FILE);
+ }
+
+ @Nonnull
+ private List<ITermLoaderSvc.FileDescriptor> convertAttachmentsToFileDescriptors(@OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List<ICompositeType> theFiles) {
+ List<ITermLoaderSvc.FileDescriptor> files = new ArrayList<>();
+ for (ICompositeType next : theFiles) {
+ byte[] nextData = AttachmentUtil.getOrCreateData(myCtx, next).getValue();
+ String nextUrl = AttachmentUtil.getOrCreateUrl(myCtx, next).getValue();
+ ValidateUtil.isTrueOrThrowInvalidRequest(nextData != null && nextData.length > 0, "Missing Attachment.data value");
+ ValidateUtil.isNotBlankOrThrowUnprocessableEntity(nextUrl, "Missing Attachment.url value");
+
+ files.add(new ITermLoaderSvc.ByteArrayFileDescriptor(nextUrl, nextData));
+ }
+ return files;
+ }
+
+ private IBaseParameters toDeltaResponse(UploadStatistics theOutcome) {
+ IBaseParameters retVal = ParametersUtil.newInstance(myCtx);
+ ParametersUtil.addParameterToParametersInteger(myCtx, retVal, RESP_PARAM_CONCEPT_COUNT, theOutcome.getUpdatedConceptCount());
+ ParametersUtil.addParameterToParametersReference(myCtx, retVal, RESP_PARAM_TARGET, theOutcome.getTarget().getValue());
+ return retVal;
+ }
+
}
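For orientation, the delta-add operation defined above takes the code system URI in the "system" parameter (PARAM_SYSTEM) and one or more Attachment values in "file" (PARAM_FILE). A rough client-side sketch of invoking it with the HAPI generic client (R4 model; the endpoint URL, CSV payload, and file name are illustrative assumptions, not part of this patch):

// Illustrative invocation of $apply-codesystem-delta-add from a FHIR client.
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Attachment;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.UriType;
import java.nio.charset.StandardCharsets;

public class DeltaAddClientSketch {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4();
      IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

      // Hypothetical CSV payload; the real expected file format is defined by the loader service.
      byte[] csvBytes = "CODE,DISPLAY\nCHEM,Chemistry\n".getBytes(StandardCharsets.UTF_8);
      Attachment file = new Attachment();
      file.setUrl("file:concepts.csv");
      file.setData(csvBytes);

      Parameters inParams = new Parameters();
      inParams.addParameter().setName("system").setValue(new UriType("http://example.com/labCodes"));
      inParams.addParameter().setName("file").setValue(file);

      Parameters outParams = client.operation()
         .onType(CodeSystem.class)
         .named("$apply-codesystem-delta-add")
         .withParameters(inParams)
         .execute();
      // outParams carries the conceptCount and target values built in toDeltaResponse() above.
   }
}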
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
index d7869c6118a..9f048eca18f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
@@ -53,6 +53,7 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.method.PageMethodBinding;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;
+import ca.uhn.fhir.util.AsyncUtil;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
@@ -76,6 +77,7 @@ import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
+import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManager;
@@ -154,14 +156,16 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
public void cancelAllActiveSearches() {
for (SearchTask next : myIdToSearchTask.values()) {
next.requestImmediateAbort();
- try {
- next.getCompletionLatch().await(30, TimeUnit.SECONDS);
- } catch (InterruptedException e) {
- ourLog.warn("Failed to wait for completion", e);
- }
+ AsyncUtil.awaitLatchAndIgnoreInterrupt(next.getCompletionLatch(), 30, TimeUnit.SECONDS);
}
}
+ @SuppressWarnings("SameParameterValue")
+ @VisibleForTesting
+ void setMaxMillisToWaitForRemoteResultsForUnitTest(long theMaxMillisToWaitForRemoteResults) {
+ myMaxMillisToWaitForRemoteResults = theMaxMillisToWaitForRemoteResults;
+ }
+
/**
* This method is called by the HTTP client processing thread in order to
* fetch resources.
@@ -189,18 +193,16 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
if (searchTask != null) {
ourLog.trace("Local search found");
List<Long> resourcePids = searchTask.getResourcePids(theFrom, theTo);
- if (resourcePids != null) {
- ourLog.trace("Local search returned {} pids, wanted {}-{} - Search: {}", resourcePids.size(), theFrom, theTo, searchTask.getSearch());
+ ourLog.trace("Local search returned {} pids, wanted {}-{} - Search: {}", resourcePids.size(), theFrom, theTo, searchTask.getSearch());
- /*
- * Generally, if a search task is open, the fastest possible thing is to just return its results. This
- * will work most of the time, but can fail if the task hit a search threshold and the client is requesting
- * results beyond that threashold. In that case, we'll keep going below, since that will trigger another
- * task.
- */
- if ((searchTask.getSearch().getNumFound() - searchTask.getSearch().getNumBlocked()) >= theTo || resourcePids.size() == (theTo - theFrom)) {
- return resourcePids;
- }
+ /*
+ * Generally, if a search task is open, the fastest possible thing is to just return its results. This
+ * will work most of the time, but can fail if the task hit a search threshold and the client is requesting
+ * results beyond that threshold. In that case, we'll keep going below, since that will trigger another
+ * task.
+ */
+ if ((searchTask.getSearch().getNumFound() - searchTask.getSearch().getNumBlocked()) >= theTo || resourcePids.size() == (theTo - theFrom)) {
+ return resourcePids;
}
}
}
@@ -244,11 +246,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
}
}
- try {
- Thread.sleep(500);
- } catch (InterruptedException e) {
- // ignore
- }
+ AsyncUtil.sleep(500);
}
ourLog.trace("Finished looping");
@@ -627,14 +625,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
Integer awaitInitialSync() {
ourLog.trace("Awaiting initial sync");
do {
- try {
- if (getInitialCollectionLatch().await(250, TimeUnit.MILLISECONDS)) {
- break;
- }
- } catch (InterruptedException e) {
- // Shouldn't happen
- Thread.currentThread().interrupt();
- throw new InternalErrorException(e);
+ if (AsyncUtil.awaitLatchAndThrowInternalErrorExceptionOnInterrupt(getInitialCollectionLatch(), 250L, TimeUnit.MILLISECONDS)) {
+ break;
}
} while (getSearch().getStatus() == SearchStatusEnum.LOADING);
ourLog.trace("Initial sync completed");
@@ -663,7 +655,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
return sb;
}
- public List<Long> getResourcePids(int theFromIndex, int theToIndex) {
+ @Nonnull
+ List<Long> getResourcePids(int theFromIndex, int theToIndex) {
ourLog.debug("Requesting search PIDs from {}-{}", theFromIndex, theToIndex);
boolean keepWaiting;
@@ -698,11 +691,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
if (keepWaiting) {
ourLog.info("Waiting as we only have {} results - Search status: {}", mySyncedPids.size(), mySearch.getStatus());
- try {
- Thread.sleep(500);
- } catch (InterruptedException theE) {
- // ignore
- }
+ AsyncUtil.sleep(500L);
}
} while (keepWaiting);
@@ -1081,11 +1070,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
}
if (myLoadingThrottleForUnitTests != null) {
- try {
- Thread.sleep(myLoadingThrottleForUnitTests);
- } catch (InterruptedException e) {
- // ignore
- }
+ AsyncUtil.sleep(myLoadingThrottleForUnitTests);
}
}
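The changes above route all latch waits and throttling sleeps through ca.uhn.fhir.util.AsyncUtil. That class is not part of this patch; based purely on the call sites here, a plausible shape is the following sketch (an assumption, not the actual HAPI implementation):

// Plausible shape of AsyncUtil inferred from the call sites above - NOT the actual HAPI source.
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public final class AsyncUtil {
   private static final Logger ourLog = LoggerFactory.getLogger(AsyncUtil.class);

   private AsyncUtil() {
      // static utility class
   }

   /** Waits for the latch, logging and swallowing an interrupt. */
   public static boolean awaitLatchAndIgnoreInterrupt(CountDownLatch theLatch, long theTimeout, TimeUnit theUnit) {
      try {
         return theLatch.await(theTimeout, theUnit);
      } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
         ourLog.warn("Interrupted while waiting for latch", e);
         return false;
      }
   }

   /** Waits for the latch, converting an interrupt into an InternalErrorException. */
   public static boolean awaitLatchAndThrowInternalErrorExceptionOnInterrupt(CountDownLatch theLatch, long theTimeout, TimeUnit theUnit) {
      try {
         return theLatch.await(theTimeout, theUnit);
      } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
         throw new InternalErrorException(e);
      }
   }

   /** Sleeps for the given number of milliseconds, returning false if interrupted. */
   public static boolean sleep(long theMillis) {
      try {
         Thread.sleep(theMillis);
         return true;
      } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
         return false;
      }
   }
}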
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java
index 678c457c601..5bf82275086 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java
@@ -48,17 +48,15 @@ import java.util.List;
import java.util.Optional;
public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
- private static final Logger ourLog = LoggerFactory.getLogger(DatabaseSearchCacheSvcImpl.class);
-
/*
* Be careful increasing this number! We use the number of params here in a
- * DELETE FROM foo WHERE params IN (aaaa)
+ * DELETE FROM foo WHERE params IN (term,term,term...)
* type query and this can fail if we have 1000s of params
*/
public static final int DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT = 500;
public static final int DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS = 20000;
public static final long DEFAULT_CUTOFF_SLACK = 10 * DateUtils.MILLIS_PER_SECOND;
-
+ private static final Logger ourLog = LoggerFactory.getLogger(DatabaseSearchCacheSvcImpl.class);
private static int ourMaximumResultsToDeleteInOneStatement = DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT;
private static int ourMaximumResultsToDeleteInOnePass = DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS;
private static Long ourNowForUnitTests;
@@ -108,6 +106,14 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
}
+ void setSearchDaoForUnitTest(ISearchDao theSearchDao) {
+ mySearchDao = theSearchDao;
+ }
+
+ void setTxManagerForUnitTest(PlatformTransactionManager theTxManager) {
+ myTxManager = theTxManager;
+ }
+
@Override
@Transactional(Transactional.TxType.NEVER)
public Optional<Search> tryToMarkSearchAsInProgress(Search theSearch) {
@@ -185,7 +191,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
int count = toDelete.getContent().size();
if (count > 0) {
- if (ourLog.isDebugEnabled()) {
+ if (ourLog.isDebugEnabled() || "true".equalsIgnoreCase(System.getProperty("test"))) {
Long total = tt.execute(t -> mySearchDao.count());
ourLog.debug("Deleted {} searches, {} remaining", count, total);
}
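The new package-private setSearchDaoForUnitTest / setTxManagerForUnitTest setters let a unit test inject collaborators without bootstrapping a Spring context; because they are package-private, the test has to live in the same package. A minimal Mockito sketch (hypothetical test, not part of the patch):

// Hypothetical JUnit test in package ca.uhn.fhir.jpa.search.cache, using Mockito mocks.
@Test
public void testSettersWireMockedCollaborators() {
   DatabaseSearchCacheSvcImpl svc = new DatabaseSearchCacheSvcImpl();
   svc.setSearchDaoForUnitTest(mock(ISearchDao.class));
   svc.setTxManagerForUnitTest(mock(PlatformTransactionManager.class));
   // ... exercise svc against the mocked DAO and transaction manager ...
}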
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java
similarity index 69%
rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java
rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java
index a9983c9d13b..94b48481a87 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java
@@ -30,13 +30,15 @@ import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
+import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
+import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
-import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
-import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.util.ValidateUtil;
@@ -44,7 +46,6 @@ import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Stopwatch;
-import com.google.common.collect.ArrayListMultimap;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
@@ -59,8 +60,6 @@ import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseCoding;
-import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.codesystems.ConceptSubsumptionOutcome;
@@ -70,16 +69,12 @@ import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
-import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
-import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
-import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
-import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nonnull;
@@ -94,17 +89,14 @@ import javax.validation.constraints.NotNull;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.function.Supplier;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.*;
-public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, ApplicationContextAware {
+public abstract class BaseTermReadSvcImpl implements ITermReadSvc, ApplicationContextAware {
public static final int DEFAULT_FETCH_SIZE = 250;
- private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiTerminologySvcImpl.class);
- private static final Object PLACEHOLDER_OBJECT = new Object();
- private static boolean ourForceSaveDeferredAlwaysForUnitTest;
+ private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseTermReadSvcImpl.class);
private static boolean ourLastResultsFromTranslationCache; // For testing.
private static boolean ourLastResultsFromTranslationWithReverseCache; // For testing.
@Autowired
@@ -135,22 +127,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
protected FhirContext myContext;
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
- private ArrayListMultimap<Long, Long> myChildToParentPidCache;
@Autowired
private ITermCodeSystemVersionDao myCodeSystemVersionDao;
- private List<TermConceptParentChildLink> myConceptLinksToSaveLater = new ArrayList<>();
- @Autowired
- private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;
- private List<TermConcept> myDeferredConcepts = Collections.synchronizedList(new ArrayList<>());
- private List<ValueSet> myDeferredValueSets = Collections.synchronizedList(new ArrayList<>());
- private List<ConceptMap> myDeferredConceptMaps = Collections.synchronizedList(new ArrayList<>());
@Autowired
private DaoConfig myDaoConfig;
- private long myNextReindexPass;
- private boolean myProcessDeferred = true;
- @Autowired
- private PlatformTransactionManager myTransactionMgr;
- private IFhirResourceDaoCodeSystem<?, ?, ?> myCodeSystemResourceDao;
private IFhirResourceDaoValueSet<?, ?, ?> myValueSetResourceDao;
private Cache> myTranslationCache;
private Cache> myTranslationWithReverseCache;
@@ -165,8 +145,12 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
private PlatformTransactionManager myTxManager;
@Autowired
private ITermValueSetConceptViewDao myTermValueSetConceptViewDao;
- @Autowired
- private ISchedulerService mySchedulerService;
+ @Autowired
+ private ISchedulerService mySchedulerService;
+ @Autowired(required = false)
+ private ITermDeferredStorageSvc myDeferredStorageSvc;
+ @Autowired(required = false)
+ private ITermCodeSystemStorageSvc myConceptStorageSvc;
private void addCodeIfNotAlreadyAdded(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set<String> theAddedCodes, TermConcept theConcept, boolean theAdd, AtomicInteger theCodeCounter) {
String codeSystem = theConcept.getCodeSystemVersion().getCodeSystem().getCodeSystemUri();
@@ -208,7 +192,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
boolean retVal = theSetToPopulate.add(theConcept);
if (retVal) {
if (theSetToPopulate.size() >= myDaoConfig.getMaximumExpansionSize()) {
- String msg = myContext.getLocalizer().getMessage(BaseHapiTerminologySvcImpl.class, "expansionTooLarge", myDaoConfig.getMaximumExpansionSize());
+ String msg = myContext.getLocalizer().getMessage(BaseTermReadSvcImpl.class, "expansionTooLarge", myDaoConfig.getMaximumExpansionSize());
throw new InvalidRequestException(msg);
}
}
@@ -232,16 +216,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
.build();
}
- /**
- * This method is present only for unit tests, do not call from client code
- */
- @VisibleForTesting
- public void clearDeferred() {
- myDeferredValueSets.clear();
- myDeferredConceptMaps.clear();
- myDeferredConcepts.clear();
- }
-
/**
* This method is present only for unit tests, do not call from client code
*/
@@ -258,88 +232,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
myTranslationWithReverseCache.invalidateAll();
}
- protected abstract IIdType createOrUpdateCodeSystem(CodeSystem theCodeSystemResource);
-
- protected void validateCodeSystemForStorage(CodeSystem theCodeSystemResource) {
- ValidateUtil.isNotBlankOrThrowUnprocessableEntity(theCodeSystemResource.getUrl(), "Can not store a CodeSystem without a valid URL");
- }
-
- protected abstract void createOrUpdateConceptMap(ConceptMap theNextConceptMap);
-
- abstract void createOrUpdateValueSet(ValueSet theValueSet);
-
- @Override
- public void deleteCodeSystem(TermCodeSystem theCodeSystem) {
- ourLog.info(" * Deleting code system {}", theCodeSystem.getPid());
-
- myEntityManager.flush();
- TermCodeSystem cs = myCodeSystemDao.findById(theCodeSystem.getPid()).orElseThrow(IllegalStateException::new);
- cs.setCurrentVersion(null);
- myCodeSystemDao.save(cs);
- myCodeSystemDao.flush();
-
- int i = 0;
- List<TermCodeSystemVersion> codeSystemVersions = myCodeSystemVersionDao.findByCodeSystemPid(theCodeSystem.getPid());
- for (TermCodeSystemVersion next : codeSystemVersions) {
- deleteCodeSystemVersion(next.getPid());
- }
- myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem);
- myCodeSystemDao.delete(theCodeSystem);
-
- myEntityManager.flush();
- }
-
- public void deleteCodeSystemVersion(final Long theCodeSystemVersionPid) {
- ourLog.info(" * Deleting code system version {}", theCodeSystemVersionPid);
-
- PageRequest page1000 = PageRequest.of(0, 1000);
-
- // Parent/Child links
- {
- String descriptor = "parent/child links";
- Supplier> loader = () -> myConceptParentChildLinkDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
- Supplier counter = () -> myConceptParentChildLinkDao.countByCodeSystemVersion(theCodeSystemVersionPid);
- doDelete(descriptor, loader, counter, myConceptParentChildLinkDao);
- }
-
- // Properties
- {
- String descriptor = "concept properties";
- Supplier> loader = () -> myConceptPropertyDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
- Supplier counter = () -> myConceptPropertyDao.countByCodeSystemVersion(theCodeSystemVersionPid);
- doDelete(descriptor, loader, counter, myConceptPropertyDao);
- }
-
- // Designations
- {
- String descriptor = "concept designations";
- Supplier> loader = () -> myConceptDesignationDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
- Supplier counter = () -> myConceptDesignationDao.countByCodeSystemVersion(theCodeSystemVersionPid);
- doDelete(descriptor, loader, counter, myConceptDesignationDao);
- }
-
- // Concepts
- {
- String descriptor = "concepts";
- // For some reason, concepts are much slower to delete, so use a smaller batch size
- PageRequest page100 = PageRequest.of(0, 100);
- Supplier> loader = () -> myConceptDao.findByCodeSystemVersion(page100, theCodeSystemVersionPid);
- Supplier counter = () -> myConceptDao.countByCodeSystemVersion(theCodeSystemVersionPid);
- doDelete(descriptor, loader, counter, myConceptDao);
- }
-
- Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
- if (codeSystemOpt.isPresent()) {
- TermCodeSystem codeSystem = codeSystemOpt.get();
- ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid());
- codeSystem.setCurrentVersion(null);
- myCodeSystemDao.save(codeSystem);
- }
-
- ourLog.info(" * Deleting code system version");
- myCodeSystemVersionDao.deleteById(theCodeSystemVersionPid);
-
- }
public void deleteConceptMap(ResourceTable theResourceTable) {
// Get existing entity so it can be deleted.
@@ -396,50 +288,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
deleteValueSet(theResourceTable);
}
- private void doDelete(String theDescriptor, Supplier> theLoader, Supplier theCounter, JpaRepository theDao) {
- int count;
- ourLog.info(" * Deleting {}", theDescriptor);
- int totalCount = theCounter.get();
- StopWatch sw = new StopWatch();
- count = 0;
- while (true) {
- Slice link = theLoader.get();
- if (!link.hasContent()) {
- break;
- }
-
- TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
- txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
- txTemplate.execute(t -> {
- theDao.deleteAll(link);
- return null;
- });
-
- count += link.getNumberOfElements();
- ourLog.info(" * {} {} deleted - {}/sec - ETA: {}", count, theDescriptor, sw.formatThroughput(count, TimeUnit.SECONDS), sw.getEstimatedTimeRemaining(count, totalCount));
- }
- theDao.flush();
- }
-
- private int ensureParentsSaved(Collection<TermConceptParentChildLink> theParents) {
- ourLog.trace("Checking {} parents", theParents.size());
- int retVal = 0;
-
- for (TermConceptParentChildLink nextLink : theParents) {
- if (nextLink.getRelationshipType() == RelationshipTypeEnum.ISA) {
- TermConcept nextParent = nextLink.getParent();
- retVal += ensureParentsSaved(nextParent.getParents());
- if (nextParent.getId() == null) {
- nextParent.setUpdated(new Date());
- myConceptDao.saveAndFlush(nextParent);
- retVal++;
- ourLog.debug("Saved parent code {} and got id {}", nextParent.getCode(), nextParent.getId());
- }
- }
- }
-
- return retVal;
- }
@Override
@Transactional(propagation = Propagation.REQUIRED)
@@ -469,7 +317,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
Optional<TermValueSet> optionalTermValueSet;
if (theValueSetToExpand.hasId()) {
- Long valueSetResourcePid = getValueSetResourcePid(theValueSetToExpand.getIdElement());
+ Long valueSetResourcePid = myConceptStorageSvc.getValueSetResourcePid(theValueSetToExpand.getIdElement());
optionalTermValueSet = myValueSetDao.findByResourcePid(valueSetResourcePid);
} else if (theValueSetToExpand.hasUrl()) {
optionalTermValueSet = myValueSetDao.findByUrl(theValueSetToExpand.getUrl());
@@ -915,7 +763,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
private boolean isCodeSystemLoinc(String theSystem) {
- return IHapiTerminologyLoaderSvc.LOINC_URI.equals(theSystem);
+ return ITermLoaderSvc.LOINC_URI.equals(theSystem);
}
private void handleFilterDisplay(QueryBuilder theQb, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
@@ -958,6 +806,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
}
+ @SuppressWarnings("EnumSwitchStatementWhichMissesCases")
private void handleFilterLoincParentChild(QueryBuilder theQb, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
switch (theFilter.getOp()) {
case EQUAL:
@@ -990,6 +839,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + theProperty, theValue);
}
+ @SuppressWarnings("EnumSwitchStatementWhichMissesCases")
private void handleFilterLoincAncestor(String theSystem, QueryBuilder theQb, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
switch (theFilter.getOp()) {
case EQUAL:
@@ -1033,6 +883,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return retVal;
}
+ @SuppressWarnings("EnumSwitchStatementWhichMissesCases")
private void handleFilterLoincDescendant(String theSystem, QueryBuilder theQb, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
switch (theFilter.getOp()) {
case EQUAL:
@@ -1178,7 +1029,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
@Override
public boolean isValueSetPreExpandedForCodeValidation(ValueSet theValueSet) {
- Long valueSetResourcePid = getValueSetResourcePid(theValueSet.getIdElement());
+ Long valueSetResourcePid = myConceptStorageSvc.getValueSetResourcePid(theValueSet.getIdElement());
Optional<TermValueSet> optionalTermValueSet = myValueSetDao.findByResourcePid(valueSetResourcePid);
if (!optionalTermValueSet.isPresent()) {
@@ -1201,7 +1052,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
ValueSet theValueSet, String theSystem, String theCode, String theDisplay, Coding theCoding, CodeableConcept theCodeableConcept) {
ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet.hasId(), "ValueSet.id is required");
- Long valueSetResourcePid = getValueSetResourcePid(theValueSet.getIdElement());
+ Long valueSetResourcePid = myConceptStorageSvc.getValueSetResourcePid(theValueSet.getIdElement());
List<TermValueSetConcept> concepts = new ArrayList<>();
if (isNotBlank(theCode)) {
@@ -1241,9 +1092,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
private List<TermValueSetConcept> findByValueSetResourcePidSystemAndCode(Long theResourcePid, String theSystem, String theCode) {
List<TermValueSetConcept> retVal = new ArrayList<>();
Optional<TermValueSetConcept> optionalTermValueSetConcept = myValueSetConceptDao.findByValueSetResourcePidSystemAndCode(theResourcePid, theSystem, theCode);
- if (optionalTermValueSetConcept.isPresent()) {
- retVal.add(optionalTermValueSetConcept.get());
- }
+ optionalTermValueSetConcept.ifPresent(retVal::add);
return retVal;
}
@@ -1293,7 +1142,11 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_MANDATORY);
return txTemplate.execute(t -> {
- TermCodeSystemVersion csv = findCurrentCodeSystemVersionForSystem(theCodeSystem);
+ TermCodeSystemVersion csv = null;
+ TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(theCodeSystem);
+ if (cs != null && cs.getCurrentVersion() != null) {
+ csv = cs.getCurrentVersion();
+ }
return myConceptDao.findByCodeSystemAndCode(csv, theCode);
});
}
@@ -1360,621 +1213,30 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return toVersionIndependentConcepts(theSystem, codes);
}
- private TermCodeSystemVersion findCurrentCodeSystemVersionForSystem(String theCodeSystem) {
- TermCodeSystem cs = getCodeSystem(theCodeSystem);
- if (cs == null || cs.getCurrentVersion() == null) {
- return null;
- }
- return cs.getCurrentVersion();
- }
-
private TermCodeSystem getCodeSystem(String theSystem) {
return myCodeSystemDao.findByCodeSystemUri(theSystem);
}
- protected abstract CodeSystem getCodeSystemFromContext(String theSystem);
-
- private Long getCodeSystemResourcePid(IIdType theIdType) {
- return getCodeSystemResourcePid(theIdType, null);
- }
-
- private Long getCodeSystemResourcePid(IIdType theIdType, RequestDetails theRequestDetails) {
- return getResourcePid(myCodeSystemResourceDao, theIdType, theRequestDetails);
- }
-
- private Long getValueSetResourcePid(IIdType theIdType) {
- return getValueSetResourcePid(theIdType, null);
- }
-
- private Long getValueSetResourcePid(IIdType theIdType, RequestDetails theRequestDetails) {
- return getResourcePid(myValueSetResourceDao, theIdType, theRequestDetails);
- }
-
- private Long getResourcePid(IFhirResourceDao extends IBaseResource> theResourceDao, IIdType theIdType, RequestDetails theRequestDetails) {
- ResourceTable resourceTable = (ResourceTable) theResourceDao.readEntity(theIdType, theRequestDetails);
- return resourceTable.getId();
- }
-
- private void persistChildren(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack, int theTotalConcepts) {
- if (theConceptsStack.put(theConcept, PLACEHOLDER_OBJECT) != null) {
- return;
- }
-
- if (theConceptsStack.size() == 1 || theConceptsStack.size() % 10000 == 0) {
- float pct = (float) theConceptsStack.size() / (float) theTotalConcepts;
- ourLog.info("Have processed {}/{} concepts ({}%)", theConceptsStack.size(), theTotalConcepts, (int) (pct * 100.0f));
- }
-
- theConcept.setCodeSystemVersion(theCodeSystem);
- theConcept.setIndexStatus(BaseHapiFhirDao.INDEX_STATUS_INDEXED);
-
- if (theConceptsStack.size() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) {
- saveConcept(theConcept);
- } else {
- myDeferredConcepts.add(theConcept);
- }
-
- for (TermConceptParentChildLink next : theConcept.getChildren()) {
- persistChildren(next.getChild(), theCodeSystem, theConceptsStack, theTotalConcepts);
- }
-
- for (TermConceptParentChildLink next : theConcept.getChildren()) {
- if (theConceptsStack.size() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) {
- saveConceptLink(next);
- } else {
- myConceptLinksToSaveLater.add(next);
- }
- }
-
- }
-
- private void populateVersion(TermConcept theNext, TermCodeSystemVersion theCodeSystemVersion) {
- if (theNext.getCodeSystemVersion() != null) {
- return;
- }
- theNext.setCodeSystemVersion(theCodeSystemVersion);
- for (TermConceptParentChildLink next : theNext.getChildren()) {
- populateVersion(next.getChild(), theCodeSystemVersion);
- }
- }
-
- private void processDeferredConceptMaps() {
- int count = Math.min(myDeferredConceptMaps.size(), 20);
- for (ConceptMap nextConceptMap : new ArrayList<>(myDeferredConceptMaps.subList(0, count))) {
- ourLog.info("Creating ConceptMap: {}", nextConceptMap.getId());
- createOrUpdateConceptMap(nextConceptMap);
- myDeferredConceptMaps.remove(nextConceptMap);
- }
- ourLog.info("Saved {} deferred ConceptMap resources, have {} remaining", count, myDeferredConceptMaps.size());
- }
-
- private void processDeferredConcepts() {
- int codeCount = 0, relCount = 0;
- StopWatch stopwatch = new StopWatch();
-
- int count = Math.min(myDaoConfig.getDeferIndexingForCodesystemsOfSize(), myDeferredConcepts.size());
- ourLog.info("Saving {} deferred concepts...", count);
- while (codeCount < count && myDeferredConcepts.size() > 0) {
- TermConcept next = myDeferredConcepts.remove(0);
- codeCount += saveConcept(next);
- }
-
- if (codeCount > 0) {
- ourLog.info("Saved {} deferred concepts ({} codes remain and {} relationships remain) in {}ms ({}ms / code)",
- codeCount, myDeferredConcepts.size(), myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(codeCount));
- }
-
- if (codeCount == 0) {
- count = Math.min(myDaoConfig.getDeferIndexingForCodesystemsOfSize(), myConceptLinksToSaveLater.size());
- ourLog.info("Saving {} deferred concept relationships...", count);
- while (relCount < count && myConceptLinksToSaveLater.size() > 0) {
- TermConceptParentChildLink next = myConceptLinksToSaveLater.remove(0);
-
- if (!myConceptDao.findById(next.getChild().getId()).isPresent() || !myConceptDao.findById(next.getParent().getId()).isPresent()) {
- ourLog.warn("Not inserting link from child {} to parent {} because it appears to have been deleted", next.getParent().getCode(), next.getChild().getCode());
- continue;
- }
-
- saveConceptLink(next);
- relCount++;
- }
- }
-
- if (relCount > 0) {
- ourLog.info("Saved {} deferred relationships ({} remain) in {}ms ({}ms / entry)",
- relCount, myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(relCount));
- }
-
- if ((myDeferredConcepts.size() + myConceptLinksToSaveLater.size()) == 0) {
- ourLog.info("All deferred concepts and relationships have now been synchronized to the database");
- }
- }
-
- private void processDeferredValueSets() {
- int count = Math.min(myDeferredValueSets.size(), 20);
- for (ValueSet nextValueSet : new ArrayList<>(myDeferredValueSets.subList(0, count))) {
- ourLog.info("Creating ValueSet: {}", nextValueSet.getId());
- createOrUpdateValueSet(nextValueSet);
- myDeferredValueSets.remove(nextValueSet);
- }
- ourLog.info("Saved {} deferred ValueSet resources, have {} remaining", count, myDeferredValueSets.size());
- }
-
- private void processReindexing() {
- if (System.currentTimeMillis() < myNextReindexPass && !ourForceSaveDeferredAlwaysForUnitTest) {
- return;
- }
-
- TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
- tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
- tt.execute(new TransactionCallbackWithoutResult() {
- private void createParentsString(StringBuilder theParentsBuilder, Long theConceptPid) {
- Validate.notNull(theConceptPid, "theConceptPid must not be null");
- List<Long> parents = myChildToParentPidCache.get(theConceptPid);
- if (parents.contains(-1L)) {
- return;
- } else if (parents.isEmpty()) {
- Collection<Long> parentLinks = myConceptParentChildLinkDao.findAllWithChild(theConceptPid);
- if (parentLinks.isEmpty()) {
- myChildToParentPidCache.put(theConceptPid, -1L);
- ourLog.info("Found {} parent concepts of concept {} (cache has {})", 0, theConceptPid, myChildToParentPidCache.size());
- return;
- } else {
- for (Long next : parentLinks) {
- myChildToParentPidCache.put(theConceptPid, next);
- }
- int parentCount = myChildToParentPidCache.get(theConceptPid).size();
- ourLog.info("Found {} parent concepts of concept {} (cache has {})", parentCount, theConceptPid, myChildToParentPidCache.size());
- }
- }
-
- for (Long nextParent : parents) {
- if (theParentsBuilder.length() > 0) {
- theParentsBuilder.append(' ');
- }
- theParentsBuilder.append(nextParent);
- createParentsString(theParentsBuilder, nextParent);
- }
-
- }
-
- @Override
- protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
- int maxResult = 1000;
- Page<TermConcept> concepts = myConceptDao.findResourcesRequiringReindexing(PageRequest.of(0, maxResult));
- if (!concepts.hasContent()) {
- if (myChildToParentPidCache != null) {
- ourLog.info("Clearing parent concept cache");
- myNextReindexPass = System.currentTimeMillis() + DateUtils.MILLIS_PER_MINUTE;
- myChildToParentPidCache = null;
- }
- return;
- }
-
- if (myChildToParentPidCache == null) {
- myChildToParentPidCache = ArrayListMultimap.create();
- }
-
- ourLog.info("Indexing {} / {} concepts", concepts.getContent().size(), concepts.getTotalElements());
-
- int count = 0;
- StopWatch stopwatch = new StopWatch();
-
- for (TermConcept nextConcept : concepts) {
-
- if (isBlank(nextConcept.getParentPidsAsString())) {
- StringBuilder parentsBuilder = new StringBuilder();
- createParentsString(parentsBuilder, nextConcept.getId());
- nextConcept.setParentPids(parentsBuilder.toString());
- }
-
- saveConcept(nextConcept);
- count++;
- }
-
- ourLog.info("Indexed {} / {} concepts in {}ms - Avg {}ms / resource", count, concepts.getContent().size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(count));
- }
- });
-
- }
-
- /**
- * Returns the number of saved concepts
- */
- private int saveOrUpdateConcept(TermConcept theConcept) {
-
- TermCodeSystemVersion csv = theConcept.getCodeSystemVersion();
- Optional<TermConcept> existing = myConceptDao.findByCodeSystemAndCode(csv, theConcept.getCode());
- if (existing.isPresent()) {
- TermConcept existingConcept = existing.get();
- boolean haveChanges = false;
- if (!StringUtils.equals(existingConcept.getDisplay(), theConcept.getDisplay())) {
- existingConcept.setDisplay(theConcept.getDisplay());
- haveChanges = true;
- }
-
- if (!haveChanges) {
- return 0;
- }
-
- myConceptDao.save(existingConcept);
- return 1;
-
- } else {
- return saveConcept(theConcept);
- }
-
- }
-
- /**
- * Returns the number of saved concepts
- */
- private int saveConcept(TermConcept theConcept) {
- int retVal = 0;
-
- /*
- * If the concept has an ID, we're reindexing, so there's no need to
- * save parent concepts first (it's way too slow to do that)
- */
- if (theConcept.getId() == null) {
- retVal += ensureParentsSaved(theConcept.getParents());
- }
-
- if (theConcept.getId() == null || theConcept.getIndexStatus() == null) {
- retVal++;
- theConcept.setIndexStatus(BaseHapiFhirDao.INDEX_STATUS_INDEXED);
- theConcept.setUpdated(new Date());
- myConceptDao.save(theConcept);
-
- for (TermConceptProperty next : theConcept.getProperties()) {
- myConceptPropertyDao.save(next);
- }
-
- for (TermConceptDesignation next : theConcept.getDesignations()) {
- myConceptDesignationDao.save(next);
- }
- }
-
- ourLog.trace("Saved {} and got PID {}", theConcept.getCode(), theConcept.getId());
- return retVal;
- }
-
- private void saveConceptLink(TermConceptParentChildLink next) {
- if (next.getId() == null) {
- myConceptParentChildLinkDao.save(next);
- }
- }
-
- @Transactional(propagation = Propagation.NEVER)
- @Override
- public synchronized void saveDeferred() {
- if (isProcessDeferredPaused()) {
- return;
- } else if (isNoDeferredConceptsAndNoConceptLinksToSaveLater()) {
- processReindexing();
- }
-
- TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
- tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
- if (isDeferredConceptsOrConceptLinksToSaveLater()) {
- tt.execute(t -> {
- processDeferredConcepts();
- return null;
- });
- }
-
- if (isDeferredValueSets()) {
- tt.execute(t -> {
- processDeferredValueSets();
- return null;
- });
- }
- if (isDeferredConceptMaps()) {
- tt.execute(t -> {
- processDeferredConceptMaps();
- return null;
- });
- }
-
- }
-
- private boolean isProcessDeferredPaused() {
- return !myProcessDeferred;
- }
-
- private boolean isNoDeferredConceptsAndNoConceptLinksToSaveLater() {
- return isNoDeferredConcepts() && isNoConceptLinksToSaveLater();
- }
-
- private boolean isDeferredConceptsOrConceptLinksToSaveLater() {
- return isDeferredConcepts() || isConceptLinksToSaveLater();
- }
-
- private boolean isDeferredConcepts() {
- return !myDeferredConcepts.isEmpty();
- }
-
- private boolean isNoDeferredConcepts() {
- return myDeferredConcepts.isEmpty();
- }
-
- private boolean isConceptLinksToSaveLater() {
- return !myConceptLinksToSaveLater.isEmpty();
- }
-
- private boolean isNoConceptLinksToSaveLater() {
- return myConceptLinksToSaveLater.isEmpty();
- }
-
- private boolean isDeferredValueSets() {
- return !myDeferredValueSets.isEmpty();
- }
-
- private boolean isDeferredConceptMaps() {
- return !myDeferredConceptMaps.isEmpty();
- }
@Override
public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
myApplicationContext = theApplicationContext;
}
- @Override
- public void setProcessDeferred(boolean theProcessDeferred) {
- myProcessDeferred = theProcessDeferred;
- }
-
@PostConstruct
public void start() {
- myCodeSystemResourceDao = myApplicationContext.getBean(IFhirResourceDaoCodeSystem.class);
myValueSetResourceDao = myApplicationContext.getBean(IFhirResourceDaoValueSet.class);
myTxTemplate = new TransactionTemplate(myTransactionManager);
}
@PostConstruct
public void registerScheduledJob() {
- // Register scheduled job to save deferred concepts
- // In the future it would be great to make this a cluster-aware task somehow
- ScheduledJobDefinition jobDefinition = new ScheduledJobDefinition();
- jobDefinition.setId(BaseHapiTerminologySvcImpl.class.getName() + "_saveDeferred");
- jobDefinition.setJobClass(SaveDeferredJob.class);
- mySchedulerService.scheduleFixedDelay(5000, false, jobDefinition);
-
// Register scheduled job to pre-expand ValueSets
// In the future it would be great to make this a cluster-aware task somehow
ScheduledJobDefinition vsJobDefinition = new ScheduledJobDefinition();
- vsJobDefinition.setId(BaseHapiTerminologySvcImpl.class.getName() + "_preExpandValueSets");
+ vsJobDefinition.setId(BaseTermReadSvcImpl.class.getName() + "_preExpandValueSets");
vsJobDefinition.setJobClass(PreExpandValueSetsJob.class);
mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_MINUTE, true, vsJobDefinition);
-
- }
-
- @Override
- @Transactional(propagation = Propagation.REQUIRED)
- public void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable) {
- ourLog.info("Storing code system");
-
- ValidateUtil.isTrueOrThrowInvalidRequest(theCodeSystemVersion.getResource() != null, "No resource supplied");
- ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystemUri, "No system URI supplied");
-
- // Grab the existing versions so we can delete them later
- List<TermCodeSystemVersion> existing = myCodeSystemVersionDao.findByCodeSystemResourcePid(theCodeSystemResourcePid);
-
- /*
- * For now we always delete old versions. At some point it would be nice to allow configuration to keep old versions.
- */
-
- ourLog.info("Deleting old code system versions");
- for (TermCodeSystemVersion next : existing) {
- Long codeSystemVersionPid = next.getPid();
- deleteCodeSystemVersion(codeSystemVersionPid);
- }
-
- ourLog.info("Flushing...");
- myConceptDao.flush();
- ourLog.info("Done flushing");
-
- /*
- * Do the upload
- */
-
- TermCodeSystem codeSystem = getOrCreateTermCodeSystem(theCodeSystemResourcePid, theSystemUri, theSystemName, theCodeSystemResourceTable);
-
- theCodeSystemVersion.setCodeSystem(codeSystem);
-
- theCodeSystemVersion.setCodeSystemDisplayName(theSystemName);
- theCodeSystemVersion.setCodeSystemVersionId(theSystemVersionId);
-
- ourLog.info("Validating all codes in CodeSystem for storage (this can take some time for large sets)");
-
- // Validate the code system
- ArrayList<String> conceptsStack = new ArrayList<>();
- IdentityHashMap<TermConcept, Object> allConcepts = new IdentityHashMap<>();
- int totalCodeCount = 0;
- for (TermConcept next : theCodeSystemVersion.getConcepts()) {
- totalCodeCount += validateConceptForStorage(next, theCodeSystemVersion, conceptsStack, allConcepts);
- }
-
- ourLog.info("Saving version containing {} concepts", totalCodeCount);
-
- TermCodeSystemVersion codeSystemVersion = myCodeSystemVersionDao.saveAndFlush(theCodeSystemVersion);
-
- ourLog.info("Saving code system");
-
- codeSystem.setCurrentVersion(theCodeSystemVersion);
- codeSystem = myCodeSystemDao.saveAndFlush(codeSystem);
-
- ourLog.info("Setting CodeSystemVersion[{}] on {} concepts...", codeSystem.getPid(), totalCodeCount);
-
- for (TermConcept next : theCodeSystemVersion.getConcepts()) {
- populateVersion(next, codeSystemVersion);
- }
-
- ourLog.info("Saving {} concepts...", totalCodeCount);
-
- IdentityHashMap<TermConcept, Object> conceptsStack2 = new IdentityHashMap<>();
- for (TermConcept next : theCodeSystemVersion.getConcepts()) {
- persistChildren(next, codeSystemVersion, conceptsStack2, totalCodeCount);
- }
-
- ourLog.info("Done saving concepts, flushing to database");
-
- myConceptDao.flush();
- myConceptParentChildLinkDao.flush();
-
- ourLog.info("Done deleting old code system versions");
-
- if (myDeferredConcepts.size() > 0 || myConceptLinksToSaveLater.size() > 0) {
- ourLog.info("Note that some concept saving was deferred - still have {} concepts and {} relationships", myDeferredConcepts.size(), myConceptLinksToSaveLater.size());
- }
- }
-
- @Nonnull
- private TermCodeSystem getOrCreateTermCodeSystem(Long theCodeSystemResourcePid, String theSystemUri, String theSystemName, ResourceTable theCodeSystemResourceTable) {
- TermCodeSystem codeSystem = getCodeSystem(theSystemUri);
- if (codeSystem == null) {
- codeSystem = myCodeSystemDao.findByResourcePid(theCodeSystemResourcePid);
- if (codeSystem == null) {
- codeSystem = new TermCodeSystem();
- }
- codeSystem.setResource(theCodeSystemResourceTable);
- } else {
- if (!ObjectUtil.equals(codeSystem.getResource().getId(), theCodeSystemResourceTable.getId())) {
- String msg = myContext.getLocalizer().getMessage(BaseHapiTerminologySvcImpl.class, "cannotCreateDuplicateCodeSystemUrl", theSystemUri,
- codeSystem.getResource().getIdDt().toUnqualifiedVersionless().getValue());
- throw new UnprocessableEntityException(msg);
- }
- }
-
- codeSystem.setCodeSystemUri(theSystemUri);
- codeSystem.setName(theSystemName);
- codeSystem = myCodeSystemDao.save(codeSystem);
- return codeSystem;
- }
-
- @Override
- @Transactional(propagation = Propagation.REQUIRED)
- public IIdType storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequest, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps) {
- Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL");
-
- IIdType csId = createOrUpdateCodeSystem(theCodeSystemResource);
-
- ResourceTable resource = (ResourceTable) myCodeSystemResourceDao.readEntity(csId, theRequest);
- Long codeSystemResourcePid = resource.getId();
-
- ourLog.info("CodeSystem resource has ID: {}", csId.getValue());
-
- populateCodeSystemVersionProperties(theCodeSystemVersion, theCodeSystemResource, resource);
-
- storeNewCodeSystemVersion(codeSystemResourcePid, theCodeSystemResource.getUrl(), theCodeSystemResource.getName(), theCodeSystemResource.getVersion(), theCodeSystemVersion, resource);
-
- myDeferredConceptMaps.addAll(theConceptMaps);
- myDeferredValueSets.addAll(theValueSets);
-
- return csId;
- }
-
- private void populateCodeSystemVersionProperties(TermCodeSystemVersion theCodeSystemVersion, CodeSystem theCodeSystemResource, ResourceTable theResourceTable) {
- theCodeSystemVersion.setResource(theResourceTable);
- theCodeSystemVersion.setCodeSystemDisplayName(theCodeSystemResource.getName());
- theCodeSystemVersion.setCodeSystemVersionId(theCodeSystemResource.getVersion());
- }
-
- @Override
- @Transactional(propagation = Propagation.MANDATORY)
- public void storeNewCodeSystemVersionIfNeeded(CodeSystem theCodeSystem, ResourceTable theResourceEntity) {
- if (theCodeSystem != null && isNotBlank(theCodeSystem.getUrl())) {
- String codeSystemUrl = theCodeSystem.getUrl();
- if (theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.COMPLETE || theCodeSystem.getContent() == null || theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT) {
- ourLog.info("CodeSystem {} has a status of {}, going to store concepts in terminology tables", theResourceEntity.getIdDt().getValue(), theCodeSystem.getContentElement().getValueAsString());
-
- Long codeSystemResourcePid = getCodeSystemResourcePid(theCodeSystem.getIdElement());
-
- /*
- * If this is a not-present codesystem, we don't want to store a new version if one
- * already exists, since that will wipe out the existing concepts. We do create or update
- * the TermCodeSystem table though, since that allows the DB to reject changes
- * that would result in duplicate CodeSystem.url values.
- */
- if (theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT) {
- TermCodeSystem codeSystem = myCodeSystemDao.findByCodeSystemUri(theCodeSystem.getUrl());
- if (codeSystem != null) {
- getOrCreateTermCodeSystem(codeSystemResourcePid, theCodeSystem.getUrl(), theCodeSystem.getUrl(), theResourceEntity);
- return;
- }
- }
-
- TermCodeSystemVersion persCs = new TermCodeSystemVersion();
-
- populateCodeSystemVersionProperties(persCs, theCodeSystem, theResourceEntity);
-
- persCs.getConcepts().addAll(toPersistedConcepts(theCodeSystem.getConcept(), persCs));
- ourLog.info("Code system has {} concepts", persCs.getConcepts().size());
- storeNewCodeSystemVersion(codeSystemResourcePid, codeSystemUrl, theCodeSystem.getName(), theCodeSystem.getVersion(), persCs, theResourceEntity);
- }
-
- }
- }
-
- private List<TermConcept> toPersistedConcepts(List<CodeSystem.ConceptDefinitionComponent> theConcept, TermCodeSystemVersion theCodeSystemVersion) {
- ArrayList<TermConcept> retVal = new ArrayList<>();
-
- for (CodeSystem.ConceptDefinitionComponent next : theConcept) {
- if (isNotBlank(next.getCode())) {
- TermConcept termConcept = toTermConcept(next, theCodeSystemVersion);
- retVal.add(termConcept);
- }
- }
-
- return retVal;
- }
-
- @Nonnull
- private TermConcept toTermConcept(CodeSystem.ConceptDefinitionComponent theConceptDefinition, TermCodeSystemVersion theCodeSystemVersion) {
- TermConcept termConcept = new TermConcept();
- termConcept.setCode(theConceptDefinition.getCode());
- termConcept.setCodeSystemVersion(theCodeSystemVersion);
- termConcept.setDisplay(theConceptDefinition.getDisplay());
- termConcept.addChildren(toPersistedConcepts(theConceptDefinition.getConcept(), theCodeSystemVersion), RelationshipTypeEnum.ISA);
-
- for (CodeSystem.ConceptDefinitionDesignationComponent designationComponent : theConceptDefinition.getDesignation()) {
- if (isNotBlank(designationComponent.getValue())) {
- TermConceptDesignation designation = termConcept.addDesignation();
- designation.setLanguage(designationComponent.hasLanguage() ? designationComponent.getLanguage() : null);
- if (designationComponent.hasUse()) {
- designation.setUseSystem(designationComponent.getUse().hasSystem() ? designationComponent.getUse().getSystem() : null);
- designation.setUseCode(designationComponent.getUse().hasCode() ? designationComponent.getUse().getCode() : null);
- designation.setUseDisplay(designationComponent.getUse().hasDisplay() ? designationComponent.getUse().getDisplay() : null);
- }
- designation.setValue(designationComponent.getValue());
- }
- }
-
- for (CodeSystem.ConceptPropertyComponent next : theConceptDefinition.getProperty()) {
- TermConceptProperty property = new TermConceptProperty();
-
- property.setKey(next.getCode());
- property.setConcept(termConcept);
- property.setCodeSystemVersion(theCodeSystemVersion);
-
- if (next.getValue() instanceof StringType) {
- property.setType(TermConceptPropertyTypeEnum.STRING);
- property.setValue(next.getValueStringType().getValue());
- } else if (next.getValue() instanceof Coding) {
- Coding nextCoding = next.getValueCoding();
- property.setType(TermConceptPropertyTypeEnum.CODING);
- property.setCodeSystem(nextCoding.getSystem());
- property.setValue(nextCoding.getCode());
- property.setDisplay(nextCoding.getDisplay());
- } else if (next.getValue() != null) {
- // TODO: LOINC has properties of type BOOLEAN that we should handle
- ourLog.warn("Don't know how to handle properties of type: " + next.getValue().getClass());
- continue;
- }
-
- termConcept.getProperties().add(property);
- }
- return termConcept;
}
@Override
@@ -2083,7 +1345,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapByUrl.get();
String msg = myContext.getLocalizer().getMessage(
- BaseHapiTerminologySvcImpl.class,
+ BaseTermReadSvcImpl.class,
"cannotCreateDuplicateConceptMapUrl",
conceptMapUrl,
existingTermConceptMap.getResource().getIdDt().toUnqualifiedVersionless().getValue());
@@ -2144,31 +1406,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
private boolean isNotSafeToPreExpandValueSets() {
- return !isSafeToPreExpandValueSets();
- }
-
- private boolean isSafeToPreExpandValueSets() {
- if (isProcessDeferredPaused()) {
- return false;
- }
-
- if (isDeferredConcepts()) {
- return false;
- }
-
- if (isConceptLinksToSaveLater()) {
- return false;
- }
-
- if (isDeferredValueSets()) {
- return false;
- }
-
- if (isDeferredConceptMaps()) {
- return false;
- }
-
- return true;
+ return !myDeferredStorageSvc.isStorageQueueEmpty();
}
protected abstract ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable);
@@ -2213,7 +1451,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
TermValueSet existingTermValueSet = optionalExistingTermValueSetByUrl.get();
String msg = myContext.getLocalizer().getMessage(
- BaseHapiTerminologySvcImpl.class,
+ BaseTermReadSvcImpl.class,
"cannotCreateDuplicateValueSetUrl",
url,
existingTermValueSet.getResource().getIdDt().toUnqualifiedVersionless().getValue());
@@ -2254,91 +1492,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return new IFhirResourceDaoCodeSystem.SubsumesResult(subsumes);
}
- @Transactional
- @Override
- public AtomicInteger applyDeltaCodesystemsAdd(String theSystem, @Nullable String theParent, CodeSystem theValue) {
- TermCodeSystem cs = getCodeSystem(theSystem);
- if (cs == null) {
- List<CodeSystem.ConceptDefinitionComponent> codes = theValue.getConcept();
- theValue.setConcept(null);
- createOrUpdateCodeSystem(theValue);
- cs = getCodeSystem(theSystem);
- theValue.setConcept(codes);
- }
-
- TermCodeSystemVersion csv = cs.getCurrentVersion();
-
- AtomicInteger addedCodeCounter = new AtomicInteger(0);
-
- TermConcept parentCode = null;
- if (isNotBlank(theParent)) {
- parentCode = myConceptDao
- .findByCodeSystemAndCode(csv, theParent)
- .orElseThrow(() -> new InvalidRequestException("Unknown code [" + theSystem + "|" + theParent + "]"));
- }
-
- List<TermConcept> concepts = new ArrayList<>();
- for (CodeSystem.ConceptDefinitionComponent next : theValue.getConcept()) {
- TermConcept concept = toTermConcept(next, csv);
- if (parentCode != null) {
- parentCode.addChild(concept, RelationshipTypeEnum.ISA);
- }
- concepts.add(concept);
- }
-
- // The first pass just saves any concepts that were added to the
- // root of the CodeSystem
- List<TermConceptParentChildLink> links = new ArrayList<>();
- for (TermConcept next : concepts) {
- int addedCount = saveOrUpdateConcept(next);
- addedCodeCounter.addAndGet(addedCount);
- extractLinksFromConceptAndChildren(next, links);
- }
-
- // This second pass saves any child concepts
- for (TermConceptParentChildLink next : links) {
- next.setCodeSystem(csv);
- int addedCount = saveOrUpdateConcept(next.getChild());
- addedCodeCounter.addAndGet(addedCount);
- myConceptParentChildLinkDao.save(next);
- }
-
- return addedCodeCounter;
- }
-
- @Transactional
- @Override
- public AtomicInteger applyDeltaCodesystemsRemove(String theSystem, CodeSystem theValue) {
- TermCodeSystem cs = getCodeSystem(theSystem);
- if (cs == null) {
- throw new InvalidRequestException("Unknown code system: " + theSystem);
- }
-
- AtomicInteger removeCounter = new AtomicInteger(0);
-
- for (CodeSystem.ConceptDefinitionComponent next : theValue.getConcept()) {
- Optional<TermConcept> conceptOpt = findCode(theSystem, next.getCode());
- if (conceptOpt.isPresent()) {
- TermConcept concept = conceptOpt.get();
- deleteConceptChildrenAndConcept(concept, removeCounter);
- }
- }
-
- return removeCounter;
- }
-
- private void deleteConceptChildrenAndConcept(TermConcept theConcept, AtomicInteger theRemoveCounter) {
- for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) {
- deleteConceptChildrenAndConcept(nextChildLink.getChild(), theRemoveCounter);
- myConceptParentChildLinkDao.delete(nextChildLink);
- }
-
- myConceptDesignationDao.deleteAll(theConcept.getDesignations());
- myConceptPropertyDao.deleteAll(theConcept.getProperties());
- myConceptDao.delete(theConcept);
- theRemoveCounter.incrementAndGet();
- }
-
protected IContextValidationSupport.LookupCodeResult lookupCode(FhirContext theContext, String theSystem, String theCode) {
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
return txTemplate.execute(t -> {
@@ -2600,37 +1753,81 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return retVal;
}
- private int validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, ArrayList<String> theConceptsStack,
- IdentityHashMap<TermConcept, Object> theAllConcepts) {
- ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystemVersion() != null, "CodeSystemVersion is null");
- ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystemVersion() == theCodeSystem, "CodeSystems are not equal");
- ValidateUtil.isNotBlankOrThrowInvalidRequest(theConcept.getCode(), "CodeSystem contains a code with no code value");
- if (theConceptsStack.contains(theConcept.getCode())) {
- throw new InvalidRequestException("CodeSystem contains circular reference around code " + theConcept.getCode());
+ protected void throwInvalidValueSet(String theValueSet) {
+ throw new ResourceNotFoundException("Unknown ValueSet: " + UrlUtil.escapeUrlParam(theValueSet));
+ }
+
+ public static class PreExpandValueSetsJob implements Job {
+
+ @Autowired
+ private ITermReadSvc myTerminologySvc;
+
+ @Override
+ public void execute(JobExecutionContext theContext) {
+ myTerminologySvc.preExpandDeferredValueSetsToTerminologyTables();
}
- theConceptsStack.add(theConcept.getCode());
+ }
- int retVal = 0;
- if (theAllConcepts.put(theConcept, theAllConcepts) == null) {
- if (theAllConcepts.size() % 1000 == 0) {
- ourLog.info("Have validated {} concepts", theAllConcepts.size());
+ static List<TermConcept> toPersistedConcepts(List<CodeSystem.ConceptDefinitionComponent> theConcept, TermCodeSystemVersion theCodeSystemVersion) {
+ ArrayList<TermConcept> retVal = new ArrayList<>();
+
+ for (CodeSystem.ConceptDefinitionComponent next : theConcept) {
+ if (isNotBlank(next.getCode())) {
+ TermConcept termConcept = toTermConcept(next, theCodeSystemVersion);
+ retVal.add(termConcept);
}
- retVal = 1;
}
- for (TermConceptParentChildLink next : theConcept.getChildren()) {
- next.setCodeSystem(theCodeSystem);
- retVal += validateConceptForStorage(next.getChild(), theCodeSystem, theConceptsStack, theAllConcepts);
- }
-
- theConceptsStack.remove(theConceptsStack.size() - 1);
-
return retVal;
}
- protected void throwInvalidValueSet(String theValueSet) {
- throw new ResourceNotFoundException("Unknown ValueSet: " + UrlUtil.escapeUrlParam(theValueSet));
+ @Nonnull
+ static TermConcept toTermConcept(CodeSystem.ConceptDefinitionComponent theConceptDefinition, TermCodeSystemVersion theCodeSystemVersion) {
+ TermConcept termConcept = new TermConcept();
+ termConcept.setCode(theConceptDefinition.getCode());
+ termConcept.setCodeSystemVersion(theCodeSystemVersion);
+ termConcept.setDisplay(theConceptDefinition.getDisplay());
+ termConcept.addChildren(toPersistedConcepts(theConceptDefinition.getConcept(), theCodeSystemVersion), RelationshipTypeEnum.ISA);
+
+ for (CodeSystem.ConceptDefinitionDesignationComponent designationComponent : theConceptDefinition.getDesignation()) {
+ if (isNotBlank(designationComponent.getValue())) {
+ TermConceptDesignation designation = termConcept.addDesignation();
+ designation.setLanguage(designationComponent.hasLanguage() ? designationComponent.getLanguage() : null);
+ if (designationComponent.hasUse()) {
+ designation.setUseSystem(designationComponent.getUse().hasSystem() ? designationComponent.getUse().getSystem() : null);
+ designation.setUseCode(designationComponent.getUse().hasCode() ? designationComponent.getUse().getCode() : null);
+ designation.setUseDisplay(designationComponent.getUse().hasDisplay() ? designationComponent.getUse().getDisplay() : null);
+ }
+ designation.setValue(designationComponent.getValue());
+ }
+ }
+
+ for (CodeSystem.ConceptPropertyComponent next : theConceptDefinition.getProperty()) {
+ TermConceptProperty property = new TermConceptProperty();
+
+ property.setKey(next.getCode());
+ property.setConcept(termConcept);
+ property.setCodeSystemVersion(theCodeSystemVersion);
+
+ if (next.getValue() instanceof StringType) {
+ property.setType(TermConceptPropertyTypeEnum.STRING);
+ property.setValue(next.getValueStringType().getValue());
+ } else if (next.getValue() instanceof Coding) {
+ Coding nextCoding = next.getValueCoding();
+ property.setType(TermConceptPropertyTypeEnum.CODING);
+ property.setCodeSystem(nextCoding.getSystem());
+ property.setValue(nextCoding.getCode());
+ property.setDisplay(nextCoding.getDisplay());
+ } else if (next.getValue() != null) {
+ // TODO: LOINC has properties of type BOOLEAN that we should handle
+ ourLog.warn("Don't know how to handle properties of type: " + next.getValue().getClass());
+ continue;
+ }
+
+ termConcept.getProperties().add(property);
+ }
+ return termConcept;
}
private static void extractLinksFromConceptAndChildren(TermConcept theConcept, List theLinks) {
@@ -2651,28 +1848,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return new VersionIndependentConcept(system, code);
}
- public static class SaveDeferredJob implements Job {
-
- @Autowired
- private IHapiTerminologySvc myTerminologySvc;
-
- @Override
- public void execute(JobExecutionContext theContext) {
- myTerminologySvc.saveDeferred();
- }
- }
-
- public static class PreExpandValueSetsJob implements Job {
-
- @Autowired
- private IHapiTerminologySvc myTerminologySvc;
-
- @Override
- public void execute(JobExecutionContext theContext) {
- myTerminologySvc.preExpandDeferredValueSetsToTerminologyTables();
- }
- }
-
/**
* This method is present only for unit tests, do not call from client code
*/
@@ -2705,13 +1880,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return ourLastResultsFromTranslationWithReverseCache;
}
- /**
- * This method is present only for unit tests, do not call from client code
- */
- @VisibleForTesting
- public static void setForceSaveDeferredAlwaysForUnitTest(boolean theForceSaveDeferredAlwaysForUnitTest) {
- ourForceSaveDeferredAlwaysForUnitTest = theForceSaveDeferredAlwaysForUnitTest;
- }
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermVersionAdapterSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermVersionAdapterSvcImpl.java
new file mode 100644
index 00000000000..f6dfca9973c
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermVersionAdapterSvcImpl.java
@@ -0,0 +1,14 @@
+package ca.uhn.fhir.jpa.term;
+
+import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
+import ca.uhn.fhir.util.ValidateUtil;
+import org.hl7.fhir.r4.model.CodeSystem;
+
+public abstract class BaseTermVersionAdapterSvcImpl implements ITermVersionAdapterSvc {
+
+
+ protected void validateCodeSystemForStorage(CodeSystem theCodeSystemResource) {
+ ValidateUtil.isNotBlankOrThrowUnprocessableEntity(theCodeSystemResource.getUrl(), "Can not store a CodeSystem without a valid URL");
+ }
+
+}
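A minimal sketch of how a version-specific adapter might build on this new base class; the DAO wiring and the update call below are assumptions for illustration, since only createOrUpdateCodeSystem is visible elsewhere in this diff:

    public class TermVersionAdapterSvcR4Sketch extends BaseTermVersionAdapterSvcImpl {

        @Autowired
        private IFhirResourceDao<CodeSystem> myCodeSystemResourceDao; // assumed wiring

        @Override
        public IIdType createOrUpdateCodeSystem(CodeSystem theCodeSystemResource) {
            // Reject a CodeSystem with no URL before touching the database
            validateCodeSystemForStorage(theCodeSystemResource);
            // Delegate persistence to the resource DAO (assumed approach)
            return myCodeSystemResourceDao.update(theCodeSystemResource).getId();
        }
    }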
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java
new file mode 100644
index 00000000000..a12f14c78ae
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java
@@ -0,0 +1,123 @@
+package ca.uhn.fhir.jpa.term;
+
+import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.input.BOMInputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+public class LoadedFileDescriptors implements Closeable {
+ private static final Logger ourLog = LoggerFactory.getLogger(LoadedFileDescriptors.class);
+ private List<File> myTemporaryFiles = new ArrayList<>();
+ private List<ITermLoaderSvc.FileDescriptor> myUncompressedFileDescriptors = new ArrayList<>();
+
+ LoadedFileDescriptors(List<ITermLoaderSvc.FileDescriptor> theFileDescriptors) {
+ try {
+ for (ITermLoaderSvc.FileDescriptor next : theFileDescriptors) {
+ if (next.getFilename().toLowerCase().endsWith(".zip")) {
+ ourLog.info("Uncompressing {} into temporary files", next.getFilename());
+ try (InputStream inputStream = next.getInputStream()) {
+ try (BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream)) {
+ try (ZipInputStream zis = new ZipInputStream(bufferedInputStream)) {
+ for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null; ) {
+ BOMInputStream fis = new BOMInputStream(zis);
+ File nextTemporaryFile = File.createTempFile("hapifhir", ".tmp");
+ ourLog.info("Creating temporary file: {}", nextTemporaryFile.getAbsolutePath());
+ nextTemporaryFile.deleteOnExit();
+ try (FileOutputStream fos = new FileOutputStream(nextTemporaryFile, false)) {
+ IOUtils.copy(fis, fos);
+ String nextEntryFileName = nextEntry.getName();
+ myUncompressedFileDescriptors.add(new ITermLoaderSvc.FileDescriptor() {
+ @Override
+ public String getFilename() {
+ return nextEntryFileName;
+ }
+
+ @Override
+ public InputStream getInputStream() {
+ try {
+ return new FileInputStream(nextTemporaryFile);
+ } catch (FileNotFoundException e) {
+ throw new InternalErrorException(e);
+ }
+ }
+ });
+ myTemporaryFiles.add(nextTemporaryFile);
+ }
+ }
+ }
+ }
+ }
+ } else {
+ myUncompressedFileDescriptors.add(next);
+ }
+
+ }
+ } catch (IOException e) {
+ throw new InternalErrorException(e);
+ }
+
+ }
+
+ public boolean hasFile(String theFilename) {
+ return myUncompressedFileDescriptors
+ .stream()
+ .map(t -> t.getFilename().replaceAll(".*[\\\\/]", "")) // Strip the path from the filename
+ .anyMatch(t -> t.equals(theFilename));
+ }
+
+ @Override
+ public void close() {
+ for (File next : myTemporaryFiles) {
+ ourLog.info("Deleting temporary file: {}", next.getAbsolutePath());
+ FileUtils.deleteQuietly(next);
+ }
+ }
+
+ List<ITermLoaderSvc.FileDescriptor> getUncompressedFileDescriptors() {
+ return myUncompressedFileDescriptors;
+ }
+
+ private List<String> notFound(List<String> theExpectedFilenameFragments) {
+ Set<String> foundFragments = new HashSet<>();
+ for (String nextExpected : theExpectedFilenameFragments) {
+ for (ITermLoaderSvc.FileDescriptor next : myUncompressedFileDescriptors) {
+ if (next.getFilename().contains(nextExpected)) {
+ foundFragments.add(nextExpected);
+ break;
+ }
+ }
+ }
+
+ ArrayList<String> notFoundFileNameFragments = new ArrayList<>(theExpectedFilenameFragments);
+ notFoundFileNameFragments.removeAll(foundFragments);
+ return notFoundFileNameFragments;
+ }
+
+ void verifyMandatoryFilesExist(List<String> theExpectedFilenameFragments) {
+ List<String> notFound = notFound(theExpectedFilenameFragments);
+ if (!notFound.isEmpty()) {
+ throw new UnprocessableEntityException("Could not find the following mandatory files in input: " + notFound);
+ }
+ }
+
+ void verifyOptionalFilesExist(List<String> theExpectedFilenameFragments) {
+ List<String> notFound = notFound(theExpectedFilenameFragments);
+ if (!notFound.isEmpty()) {
+ ourLog.warn("Could not find the following optional files: " + notFound);
+ }
+ }
+
+
+}
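A short usage sketch for the new helper, assuming the caller sits in the same package (the constructor and getters are package-private) and already has a terminology ZIP on disk; the file names and the mandatory-file fragment are examples only:

    List<ITermLoaderSvc.FileDescriptor> uploads = new ArrayList<>();
    uploads.add(new ITermLoaderSvc.FileDescriptor() {
        @Override
        public String getFilename() {
            return "terminology.zip"; // example file name
        }

        @Override
        public InputStream getInputStream() {
            try {
                return new FileInputStream("/tmp/terminology.zip"); // example path
            } catch (FileNotFoundException e) {
                throw new InternalErrorException(e);
            }
        }
    });

    try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(uploads)) {
        // Throws UnprocessableEntityException if any mandatory fragment is missing
        descriptors.verifyMandatoryFilesExist(Collections.singletonList("concepts.csv"));
        for (ITermLoaderSvc.FileDescriptor next : descriptors.getUncompressedFileDescriptors()) {
            ourLog.info("Ready to parse {}", next.getFilename()); // ourLog as in the surrounding classes
        }
    } // close() deletes the temporary files created while unzipping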
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java
new file mode 100644
index 00000000000..6ea0d6c1274
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java
@@ -0,0 +1,757 @@
+package ca.uhn.fhir.jpa.term;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.dao.data.*;
+import ca.uhn.fhir.jpa.dao.index.IdHelperService;
+import ca.uhn.fhir.jpa.entity.*;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
+import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
+import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
+import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
+import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
+import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import ca.uhn.fhir.util.ObjectUtil;
+import ca.uhn.fhir.util.StopWatch;
+import ca.uhn.fhir.util.ValidateUtil;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
+import org.apache.commons.lang3.Validate;
+import org.hibernate.ScrollMode;
+import org.hibernate.ScrollableResults;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.r4.model.ConceptMap;
+import org.hl7.fhir.r4.model.ValueSet;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Slice;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.transaction.PlatformTransactionManager;
+import org.springframework.transaction.TransactionDefinition;
+import org.springframework.transaction.annotation.Propagation;
+import org.springframework.transaction.annotation.Transactional;
+import org.springframework.transaction.support.TransactionTemplate;
+
+import javax.annotation.Nonnull;
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+import javax.persistence.PersistenceContextType;
+import javax.persistence.TypedQuery;
+import javax.persistence.criteria.CriteriaBuilder;
+import javax.persistence.criteria.CriteriaQuery;
+import javax.persistence.criteria.Predicate;
+import javax.persistence.criteria.Root;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
+ private static final Logger ourLog = LoggerFactory.getLogger(TermCodeSystemStorageSvcImpl.class);
+ private static final Object PLACEHOLDER_OBJECT = new Object();
+ @PersistenceContext(type = PersistenceContextType.TRANSACTION)
+ protected EntityManager myEntityManager;
+ @Autowired
+ protected ITermCodeSystemDao myCodeSystemDao;
+ @Autowired
+ protected ITermCodeSystemVersionDao myCodeSystemVersionDao;
+ @Autowired
+ protected ITermConceptDao myConceptDao;
+ @Autowired
+ protected ITermConceptPropertyDao myConceptPropertyDao;
+ @Autowired
+ protected ITermConceptDesignationDao myConceptDesignationDao;
+ @Autowired
+ protected IdHelperService myIdHelperService;
+ @Autowired
+ private PlatformTransactionManager myTransactionManager;
+ @Autowired
+ private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;
+ @Autowired
+ private ITermVersionAdapterSvc myTerminologyVersionAdapterSvc;
+ @Autowired
+ private ITermDeferredStorageSvc myDeferredStorageSvc;
+ @Autowired
+ private FhirContext myContext;
+ @Autowired
+ private ITermReadSvc myTerminologySvc;
+ @Autowired
+ private DaoConfig myDaoConfig;
+ @Autowired
+ private IResourceTableDao myResourceTableDao;
+
+ @Override
+ public Long getValueSetResourcePid(IIdType theIdType) {
+ return getValueSetResourcePid(theIdType, null);
+ }
+
+ private Long getValueSetResourcePid(IIdType theIdType, RequestDetails theRequestDetails) {
+ return myIdHelperService.translateForcedIdToPid(theIdType, theRequestDetails);
+ }
+
+ @Transactional
+ @Override
+ public UploadStatistics applyDeltaCodeSystemsAdd(String theSystem, CustomTerminologySet theAdditions) {
+ ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystem, "No system provided");
+ validateDstu3OrNewer();
+ theAdditions.validateNoCycleOrThrowInvalidRequest();
+
+ TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
+ if (cs == null) {
+ CodeSystem codeSystemResource = new CodeSystem();
+ codeSystemResource.setUrl(theSystem);
+ codeSystemResource.setContent(CodeSystem.CodeSystemContentMode.NOTPRESENT);
+ myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(codeSystemResource);
+
+ cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
+ }
+
+ TermCodeSystemVersion csv = cs.getCurrentVersion();
+ Validate.notNull(csv);
+
+ CodeSystem codeSystem = myTerminologySvc.getCodeSystemFromContext(theSystem);
+ if (codeSystem.getContent() != CodeSystem.CodeSystemContentMode.NOTPRESENT) {
+ throw new InvalidRequestException("CodeSystem with url[" + theSystem + "] can not apply a delta - wrong content mode: " + codeSystem.getContent());
+ }
+
+ Validate.notNull(cs);
+ Validate.notNull(cs.getPid());
+
+ IIdType codeSystemId = cs.getResource().getIdDt();
+
+ // Load all concepts for the code system
+ Map<String, Long> codeToConceptPid = new HashMap<>();
+ {
+ ourLog.info("Loading all concepts in CodeSystem versionPid[{}] and url[{}]", cs.getPid(), theSystem);
+ StopWatch sw = new StopWatch();
+ CriteriaBuilder criteriaBuilder = myEntityManager.getCriteriaBuilder();
+ CriteriaQuery<TermConcept> query = criteriaBuilder.createQuery(TermConcept.class);
+ Root<TermConcept> root = query.from(TermConcept.class);
+ Predicate predicate = criteriaBuilder.equal(root.get("myCodeSystemVersionPid").as(Long.class), csv.getPid());
+ query.where(predicate);
+ TypedQuery<TermConcept> typedQuery = myEntityManager.createQuery(query.select(root));
+ org.hibernate.query.Query<TermConcept> hibernateQuery = (org.hibernate.query.Query<TermConcept>) typedQuery;
+ ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
+ try (ScrollableResultsIterator<TermConcept> scrollableResultsIterator = new ScrollableResultsIterator<>(scrollableResults)) {
+ while (scrollableResultsIterator.hasNext()) {
+ TermConcept next = scrollableResultsIterator.next();
+ codeToConceptPid.put(next.getCode(), next.getId());
+ }
+ }
+ ourLog.info("Loaded {} concepts in {}", codeToConceptPid.size(), sw.toString());
+ }
+
+ // Load all parent/child links
+ ListMultimap<String, String> parentCodeToChildCodes = ArrayListMultimap.create();
+ ListMultimap<String, String> childCodeToParentCodes = ArrayListMultimap.create();
+ {
+ ourLog.info("Loading all parent/child relationships in CodeSystem url[" + theSystem + "]");
+ int count = 0;
+ StopWatch sw = new StopWatch();
+ CriteriaBuilder criteriaBuilder = myEntityManager.getCriteriaBuilder();
+ CriteriaQuery<TermConceptParentChildLink> query = criteriaBuilder.createQuery(TermConceptParentChildLink.class);
+ Root<TermConceptParentChildLink> root = query.from(TermConceptParentChildLink.class);
+ Predicate predicate = criteriaBuilder.equal(root.get("myCodeSystemVersionPid").as(Long.class), csv.getPid());
+ root.fetch("myChild");
+ root.fetch("myParent");
+ query.where(predicate);
+ TypedQuery<TermConceptParentChildLink> typedQuery = myEntityManager.createQuery(query.select(root));
+ org.hibernate.query.Query<TermConceptParentChildLink> hibernateQuery = (org.hibernate.query.Query<TermConceptParentChildLink>) typedQuery;
+ ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
+ try (ScrollableResultsIterator<TermConceptParentChildLink> scrollableResultsIterator = new ScrollableResultsIterator<>(scrollableResults)) {
+ while (scrollableResultsIterator.hasNext()) {
+ TermConceptParentChildLink next = scrollableResultsIterator.next();
+ String parentCode = next.getParent().getCode();
+ String childCode = next.getChild().getCode();
+ parentCodeToChildCodes.put(parentCode, childCode);
+ childCodeToParentCodes.put(childCode, parentCode);
+ count++;
+ }
+ }
+ ourLog.info("Loaded {} parent/child relationships in {}", count, sw.toString());
+ }
+
+ // Account for root codes in the parent->child map
+ for (String nextCode : codeToConceptPid.keySet()) {
+ if (childCodeToParentCodes.get(nextCode).isEmpty()) {
+ parentCodeToChildCodes.put("", nextCode);
+ }
+ }
+
+ UploadStatistics retVal = new UploadStatistics(codeSystemId);
+
+ // Add root concepts
+ for (TermConcept nextRootConcept : theAdditions.getRootConcepts()) {
+ List<String> parentCodes = Collections.emptyList();
+ addConcept(csv, codeToConceptPid, parentCodes, nextRootConcept, parentCodeToChildCodes, retVal, true);
+ }
+
+ // Add unanchored child concepts
+ for (TermConcept nextUnanchoredChild : theAdditions.getUnanchoredChildConceptsToParentCodes().keySet()) {
+ List<String> nextParentCodes = theAdditions.getUnanchoredChildConceptsToParentCodes().get(nextUnanchoredChild);
+ addConcept(csv, codeToConceptPid, nextParentCodes, nextUnanchoredChild, parentCodeToChildCodes, retVal, true);
+ }
+
+ return retVal;
+ }
+
+ @Transactional
+ @Override
+ public UploadStatistics applyDeltaCodeSystemsRemove(String theSystem, CustomTerminologySet theValue) {
+ ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystem, "No system provided");
+ validateDstu3OrNewer();
+
+ TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
+ if (cs == null) {
+ throw new InvalidRequestException("Unknown code system: " + theSystem);
+ }
+
+ AtomicInteger removeCounter = new AtomicInteger(0);
+
+ for (TermConcept nextSuppliedConcept : theValue.getRootConcepts()) {
+ Optional conceptOpt = myTerminologySvc.findCode(theSystem, nextSuppliedConcept.getCode());
+ if (conceptOpt.isPresent()) {
+ TermConcept concept = conceptOpt.get();
+ deleteConceptChildrenAndConcept(concept, removeCounter);
+ }
+ }
+
+ IIdType target = cs.getResource().getIdDt();
+ return new UploadStatistics(removeCounter.get(), target);
+ }
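A hedged caller-side sketch for the two delta operations above; how a CustomTerminologySet is populated is not shown in this hunk, so addRootConcept below is an assumed convenience method, and the system URL and codes are examples:

    @Autowired
    private ITermCodeSystemStorageSvc myCodeSystemStorageSvc;

    public void applyExampleDelta() {
        // Add one new root concept to an existing (or automatically created) code system
        CustomTerminologySet additions = new CustomTerminologySet();
        additions.addRootConcept("CHEM", "Chemistry"); // assumed helper
        UploadStatistics added = myCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://example.org/cs", additions);

        // Remove a concept (and its children) by code
        CustomTerminologySet removals = new CustomTerminologySet();
        removals.addRootConcept("OBSOLETE", "Obsolete code"); // assumed helper
        UploadStatistics removed = myCodeSystemStorageSvc.applyDeltaCodeSystemsRemove("http://example.org/cs", removals);
    }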
+
+ @Override
+ public void deleteCodeSystem(TermCodeSystem theCodeSystem) {
+ ourLog.info(" * Deleting code system {}", theCodeSystem.getPid());
+
+ myEntityManager.flush();
+ TermCodeSystem cs = myCodeSystemDao.findById(theCodeSystem.getPid()).orElseThrow(IllegalStateException::new);
+ cs.setCurrentVersion(null);
+ myCodeSystemDao.save(cs);
+ myCodeSystemDao.flush();
+
+ List<TermCodeSystemVersion> codeSystemVersions = myCodeSystemVersionDao.findByCodeSystemPid(theCodeSystem.getPid());
+ for (TermCodeSystemVersion next : codeSystemVersions) {
+ deleteCodeSystemVersion(next.getPid());
+ }
+ myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem);
+ myCodeSystemDao.delete(theCodeSystem);
+
+ myEntityManager.flush();
+ }
+
+ /**
+ * Returns the number of saved concepts
+ */
+ @Override
+ public int saveConcept(TermConcept theConcept) {
+ int retVal = 0;
+
+ /*
+ * If the concept has an ID, we're reindexing, so there's no need to
+ * save parent concepts first (it's way too slow to do that)
+ */
+ if (theConcept.getId() == null) {
+ retVal += ensureParentsSaved(theConcept.getParents());
+ }
+
+ if (theConcept.getId() == null || theConcept.getIndexStatus() == null) {
+ retVal++;
+ theConcept.setIndexStatus(BaseHapiFhirDao.INDEX_STATUS_INDEXED);
+ theConcept.setUpdated(new Date());
+ myConceptDao.save(theConcept);
+
+ for (TermConceptProperty next : theConcept.getProperties()) {
+ myConceptPropertyDao.save(next);
+ }
+
+ for (TermConceptDesignation next : theConcept.getDesignations()) {
+ myConceptDesignationDao.save(next);
+ }
+ }
+
+ ourLog.trace("Saved {} and got PID {}", theConcept.getCode(), theConcept.getId());
+ return retVal;
+ }
+
+ @Override
+ @Transactional(propagation = Propagation.MANDATORY)
+ public void storeNewCodeSystemVersionIfNeeded(CodeSystem theCodeSystem, ResourceTable theResourceEntity) {
+ if (theCodeSystem != null && isNotBlank(theCodeSystem.getUrl())) {
+ String codeSystemUrl = theCodeSystem.getUrl();
+ if (theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.COMPLETE || theCodeSystem.getContent() == null || theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT) {
+ ourLog.info("CodeSystem {} has a status of {}, going to store concepts in terminology tables", theResourceEntity.getIdDt().getValue(), theCodeSystem.getContentElement().getValueAsString());
+
+ Long codeSystemResourcePid = getCodeSystemResourcePid(theCodeSystem.getIdElement());
+
+ /*
+ * If this is a not-present codesystem, we don't want to store a new version if one
+ * already exists, since that will wipe out the existing concepts. We do create or update
+ * the TermCodeSystem table though, since that allows the DB to reject changes
+ * that would result in duplicate CodeSystem.url values.
+ */
+ if (theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT) {
+ TermCodeSystem codeSystem = myCodeSystemDao.findByCodeSystemUri(theCodeSystem.getUrl());
+ if (codeSystem != null) {
+ getOrCreateTermCodeSystem(codeSystemResourcePid, theCodeSystem.getUrl(), theCodeSystem.getUrl(), theResourceEntity);
+ return;
+ }
+ }
+
+ TermCodeSystemVersion persCs = new TermCodeSystemVersion();
+
+ populateCodeSystemVersionProperties(persCs, theCodeSystem, theResourceEntity);
+
+ persCs.getConcepts().addAll(BaseTermReadSvcImpl.toPersistedConcepts(theCodeSystem.getConcept(), persCs));
+ ourLog.info("Code system has {} concepts", persCs.getConcepts().size());
+ storeNewCodeSystemVersion(codeSystemResourcePid, codeSystemUrl, theCodeSystem.getName(), theCodeSystem.getVersion(), persCs, theResourceEntity);
+ }
+
+ }
+ }
+
+ @Override
+ @Transactional(propagation = Propagation.REQUIRED)
+ public IIdType storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequest, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps) {
+ Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL");
+
+ IIdType csId = myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(theCodeSystemResource);
+ Long codeSystemResourcePid = myIdHelperService.translateForcedIdToPid(csId, theRequest);
+ ResourceTable resource = myResourceTableDao.getOne(codeSystemResourcePid);
+
+ ourLog.info("CodeSystem resource has ID: {}", csId.getValue());
+
+ populateCodeSystemVersionProperties(theCodeSystemVersion, theCodeSystemResource, resource);
+
+ storeNewCodeSystemVersion(codeSystemResourcePid, theCodeSystemResource.getUrl(), theCodeSystemResource.getName(), theCodeSystemResource.getVersion(), theCodeSystemVersion, resource);
+
+ myDeferredStorageSvc.addConceptMapsToStorageQueue(theConceptMaps);
+ myDeferredStorageSvc.addValueSetsToStorageQueue(theValueSets);
+
+ return csId;
+ }
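For context, a caller-side sketch of the resource-based overload above; the URL, code and empty ValueSet/ConceptMap lists are examples, and theRequestDetails is assumed to come from the invoking server operation:

    CodeSystem codeSystemResource = new CodeSystem();
    codeSystemResource.setUrl("http://example.org/cs"); // example URL
    codeSystemResource.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);

    TermCodeSystemVersion version = new TermCodeSystemVersion();
    TermConcept root = new TermConcept();
    root.setCode("ROOT");            // example concept
    root.setDisplay("Root concept");
    root.setCodeSystemVersion(version);
    version.getConcepts().add(root);

    IIdType id = myCodeSystemStorageSvc.storeNewCodeSystemVersion(
        codeSystemResource, version, theRequestDetails,
        Collections.<ValueSet>emptyList(), Collections.<ConceptMap>emptyList());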
+
+ @Override
+ @Transactional(propagation = Propagation.REQUIRED)
+ public void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable) {
+ ourLog.info("Storing code system");
+
+ ValidateUtil.isTrueOrThrowInvalidRequest(theCodeSystemVersion.getResource() != null, "No resource supplied");
+ ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystemUri, "No system URI supplied");
+
+ // Grab the existing versions so we can delete them later
+ List<TermCodeSystemVersion> existing = myCodeSystemVersionDao.findByCodeSystemResourcePid(theCodeSystemResourcePid);
+
+ /*
+ * For now we always delete old versions. At some point it would be nice to allow configuration to keep old versions.
+ */
+
+ ourLog.info("Deleting old code system versions");
+ for (TermCodeSystemVersion next : existing) {
+ Long codeSystemVersionPid = next.getPid();
+ deleteCodeSystemVersion(codeSystemVersionPid);
+ }
+
+ ourLog.info("Flushing...");
+ myConceptDao.flush();
+ ourLog.info("Done flushing");
+
+ /*
+ * Do the upload
+ */
+
+ TermCodeSystem codeSystem = getOrCreateTermCodeSystem(theCodeSystemResourcePid, theSystemUri, theSystemName, theCodeSystemResourceTable);
+
+ theCodeSystemVersion.setCodeSystem(codeSystem);
+
+ theCodeSystemVersion.setCodeSystemDisplayName(theSystemName);
+ theCodeSystemVersion.setCodeSystemVersionId(theSystemVersionId);
+
+ ourLog.info("Validating all codes in CodeSystem for storage (this can take some time for large sets)");
+
+ // Validate the code system
+ ArrayList<String> conceptsStack = new ArrayList<>();
+ IdentityHashMap<TermConcept, Object> allConcepts = new IdentityHashMap<>();
+ int totalCodeCount = 0;
+ for (TermConcept next : theCodeSystemVersion.getConcepts()) {
+ totalCodeCount += validateConceptForStorage(next, theCodeSystemVersion, conceptsStack, allConcepts);
+ }
+
+ ourLog.info("Saving version containing {} concepts", totalCodeCount);
+
+ TermCodeSystemVersion codeSystemVersion = myCodeSystemVersionDao.saveAndFlush(theCodeSystemVersion);
+
+ ourLog.info("Saving code system");
+
+ codeSystem.setCurrentVersion(theCodeSystemVersion);
+ codeSystem = myCodeSystemDao.saveAndFlush(codeSystem);
+
+ ourLog.info("Setting CodeSystemVersion[{}] on {} concepts...", codeSystem.getPid(), totalCodeCount);
+
+ for (TermConcept next : theCodeSystemVersion.getConcepts()) {
+ populateVersion(next, codeSystemVersion);
+ }
+
+ ourLog.info("Saving {} concepts...", totalCodeCount);
+
+ IdentityHashMap<TermConcept, Object> conceptsStack2 = new IdentityHashMap<>();
+ for (TermConcept next : theCodeSystemVersion.getConcepts()) {
+ persistChildren(next, codeSystemVersion, conceptsStack2, totalCodeCount);
+ }
+
+ ourLog.info("Done saving concepts, flushing to database");
+
+ myConceptDao.flush();
+ myConceptParentChildLinkDao.flush();
+
+ ourLog.info("Done deleting old code system versions");
+
+ if (myDeferredStorageSvc.isStorageQueueEmpty() == false) {
+ ourLog.info("Note that some concept saving has been deferred");
+ }
+ }
+
+ private void deleteCodeSystemVersion(final Long theCodeSystemVersionPid) {
+ ourLog.info(" * Deleting code system version {}", theCodeSystemVersionPid);
+
+ PageRequest page1000 = PageRequest.of(0, 1000);
+
+ // Parent/Child links
+ {
+ String descriptor = "parent/child links";
+ Supplier<Slice<Long>> loader = () -> myConceptParentChildLinkDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
+ Supplier<Integer> counter = () -> myConceptParentChildLinkDao.countByCodeSystemVersion(theCodeSystemVersionPid);
+ doDelete(descriptor, loader, counter, myConceptParentChildLinkDao);
+ }
+
+ // Properties
+ {
+ String descriptor = "concept properties";
+ Supplier<Slice<Long>> loader = () -> myConceptPropertyDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
+ Supplier<Integer> counter = () -> myConceptPropertyDao.countByCodeSystemVersion(theCodeSystemVersionPid);
+ doDelete(descriptor, loader, counter, myConceptPropertyDao);
+ }
+
+ // Designations
+ {
+ String descriptor = "concept designations";
+ Supplier<Slice<Long>> loader = () -> myConceptDesignationDao.findByCodeSystemVersion(page1000, theCodeSystemVersionPid);
+ Supplier<Integer> counter = () -> myConceptDesignationDao.countByCodeSystemVersion(theCodeSystemVersionPid);
+ doDelete(descriptor, loader, counter, myConceptDesignationDao);
+ }
+
+ // Concepts
+ {
+ String descriptor = "concepts";
+ // For some reason, concepts are much slower to delete, so use a smaller batch size
+ PageRequest page100 = PageRequest.of(0, 100);
+ Supplier<Slice<Long>> loader = () -> myConceptDao.findByCodeSystemVersion(page100, theCodeSystemVersionPid);
+ Supplier<Integer> counter = () -> myConceptDao.countByCodeSystemVersion(theCodeSystemVersionPid);
+ doDelete(descriptor, loader, counter, myConceptDao);
+ }
+
+ Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
+ if (codeSystemOpt.isPresent()) {
+ TermCodeSystem codeSystem = codeSystemOpt.get();
+ ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid());
+ codeSystem.setCurrentVersion(null);
+ myCodeSystemDao.save(codeSystem);
+ }
+
+ ourLog.info(" * Deleting code system version");
+ myCodeSystemVersionDao.deleteById(theCodeSystemVersionPid);
+
+ }
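The doDelete(...) helper used above is not part of this hunk; a plausible shape, consistent with the Supplier<Slice<Long>> loader / Supplier<Integer> counter pairs it receives, might be the following sketch (an assumption, not the actual implementation):

    private <T> void doDelete(String theDescriptor, Supplier<Slice<Long>> theLoader, Supplier<Integer> theCounter, JpaRepository<T, Long> theDao) {
        int count = theCounter.get();
        ourLog.info(" * Deleting {} {}", count, theDescriptor);
        while (true) {
            Slice<Long> ids = theLoader.get();
            if (!ids.hasContent()) {
                break;
            }
            // Delete one page of rows at a time so each flush stays bounded
            for (Long nextId : ids) {
                theDao.deleteById(nextId);
            }
            theDao.flush();
        }
    }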
+
+ private void validateDstu3OrNewer() {
+ Validate.isTrue(myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), "Terminology operations only supported in DSTU3+ mode");
+ }
+
+ private void addConcept(TermCodeSystemVersion theCsv, Map<String, Long> theCodeToConceptPid, Collection<String> theParentCodes, TermConcept theConceptToAdd, ListMultimap<String, String> theParentCodeToChildCodes, UploadStatistics theStatisticsTracker, boolean theForceResequence) {
+ TermConcept nextConceptToAdd = theConceptToAdd;
+
+ String nextCodeToAdd = nextConceptToAdd.getCode();
+ String parentDescription = "(root concept)";
+ Set<TermConcept> parentConcepts = new HashSet<>();
+ if (!theParentCodes.isEmpty()) {
+ parentDescription = "[" + String.join(", ", theParentCodes) + "]";
+ for (String nextParentCode : theParentCodes) {
+ Long nextParentCodePid = theCodeToConceptPid.get(nextParentCode);
+ if (nextParentCodePid == null) {
+ throw new InvalidRequestException("Unable to add code \"" + nextCodeToAdd + "\" to unknown parent: " + nextParentCode);
+ }
+ parentConcepts.add(myConceptDao.getOne(nextParentCodePid));
+ }
+ }
+
+ ourLog.info("Saving concept {} with parent {}", theStatisticsTracker.getUpdatedConceptCount(), parentDescription);
+
+ if (theCodeToConceptPid.containsKey(nextCodeToAdd)) {
+
+ TermConcept existingCode = myConceptDao.getOne(theCodeToConceptPid.get(nextCodeToAdd));
+ existingCode.setIndexStatus(null);
+ existingCode.setDisplay(nextConceptToAdd.getDisplay());
+ nextConceptToAdd = existingCode;
+
+ }
+
+ if (theConceptToAdd.getSequence() == null || theForceResequence) {
+ // If this is a new code, give it a sequence number based on how many concepts the
+ // parent already has (or the highest number, if the code has multiple parents)
+ int sequence = 0;
+ for (String nextParentCode : theParentCodes) {
+ theParentCodeToChildCodes.put(nextParentCode, nextCodeToAdd);
+ sequence = Math.max(sequence, theParentCodeToChildCodes.get(nextParentCode).size());
+ }
+ if (theParentCodes.isEmpty()) {
+ theParentCodeToChildCodes.put("", nextCodeToAdd);
+ sequence = Math.max(sequence, theParentCodeToChildCodes.get("").size());
+ }
+ nextConceptToAdd.setSequence(sequence);
+ }
+
+
+ // Drop any old parent-child links if they aren't explicitly specified in the
+ // hierarchy being added
+ for (Iterator