From dfd098c374c30dd6b09b93f51230ba813eae9928 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Mon, 19 Aug 2019 02:26:12 -0400 Subject: [PATCH 01/23] Incremental work on large ValueSet expansion support; added use of terminology tables when expanding. --- .../java/ca/uhn/fhir/util/ValidateUtil.java | 2 +- .../java/ca/uhn/fhir/jpa/dao/DaoConfig.java | 92 +++++++ .../jpa/dao/FhirResourceDaoValueSetDstu2.java | 59 ++-- .../jpa/dao/IFhirResourceDaoValueSet.java | 6 + .../dstu3/FhirResourceDaoValueSetDstu3.java | 84 +++++- .../jpa/dao/r4/FhirResourceDaoValueSetR4.java | 78 +++++- .../jpa/dao/r5/FhirResourceDaoValueSetR5.java | 84 +++++- .../fhir/jpa/provider/BaseJpaProvider.java | 8 +- .../BaseJpaResourceProviderValueSetDstu3.java | 51 +++- .../BaseJpaResourceProviderConceptMapR4.java | 2 +- .../r4/BaseJpaResourceProviderValueSetR4.java | 51 +++- .../r5/BaseJpaResourceProviderValueSetR5.java | 51 +++- .../jpa/term/BaseHapiTerminologySvcImpl.java | 63 ++++- .../jpa/term/HapiTerminologySvcDstu2.java | 5 + .../jpa/term/HapiTerminologySvcDstu3.java | 14 + .../fhir/jpa/term/HapiTerminologySvcR4.java | 6 + .../fhir/jpa/term/HapiTerminologySvcR5.java | 7 + .../fhir/jpa/term/IHapiTerminologySvc.java | 7 + .../ResourceProviderDstu3ValueSetTest.java | 31 ++- .../r4/ResourceProviderR4ValueSetTest.java | 31 ++- .../jpa/term/TerminologySvcImplR4Test.java | 258 ++++++++++++++++++ 21 files changed, 922 insertions(+), 68 deletions(-) diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java index 4dd9ed42b22..2826f6db314 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java @@ -76,7 +76,7 @@ public class ValidateUtil { } public static void isTrueOrThrowInvalidRequest(boolean theSuccess, String theMessage, Object... 
theValues) { - if (theSuccess == false) { + if (!theSuccess) { throw new InvalidRequestException(String.format(theMessage, theValues)); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java index 4cc40737de0..a585d9104ca 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java @@ -149,6 +149,18 @@ public class DaoConfig { * EXPERIMENTAL - Do not use in production! Do not change default of {@code false}! */ private boolean myPreExpandValueSetsExperimental = false; + /** + * EXPERIMENTAL - Do not use in production! Do not change default of {@code 0}! + */ + private int myPreExpandValueSetsDefaultOffsetExperimental = 0; + /** + * EXPERIMENTAL - Do not use in production! Do not change default of {@code 1000}! + */ + private int myPreExpandValueSetsDefaultCountExperimental = 1000; + /** + * EXPERIMENTAL - Do not use in production! Do not change default of {@code 1000}! + */ + private int myPreExpandValueSetsMaxCountExperimental = 1000; /** * Constructor @@ -1632,6 +1644,86 @@ public class DaoConfig { myPreExpandValueSetsExperimental = thePreExpandValueSetsExperimental; } + /** + * EXPERIMENTAL - Do not use in production! + *

+ * This is the default value of {@code offset} parameter for the ValueSet {@code $expand} operation when + * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}. + *

+ *

+ * The default value for this setting is {@code 0}. + *

+ */ + public int getPreExpandValueSetsDefaultOffsetExperimental() { + return myPreExpandValueSetsDefaultOffsetExperimental; + } + + /** + * EXPERIMENTAL - Do not use in production! + *

+ * This is the default value of {@code count} parameter for the ValueSet {@code $expand} operation when + * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}. + *

+ *

+ * The default value for this setting is {@code 1000}. + *

+ */ + public int getPreExpandValueSetsDefaultCountExperimental() { + return myPreExpandValueSetsDefaultCountExperimental; + } + + /** + * EXPERIMENTAL - Do not use in production! + *

+ * This is the default value of {@code count} parameter for the ValueSet {@code $expand} operation when + * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}. + *

+ *

+ * If {@code thePreExpandValueSetsDefaultCountExperimental} is greater than + * {@link DaoConfig#getPreExpandValueSetsMaxCountExperimental()}, the lesser value is used. + *

+ *

+ * The default value for this setting is {@code 1000}. + *

+ */ + public void setPreExpandValueSetsDefaultCountExperimental(int thePreExpandValueSetsDefaultCountExperimental) { + myPreExpandValueSetsDefaultCountExperimental = Math.min(thePreExpandValueSetsDefaultCountExperimental, getPreExpandValueSetsMaxCountExperimental()); + } + + /** + * EXPERIMENTAL - Do not use in production! + *

+ * This is the max value of {@code count} parameter for the ValueSet {@code $expand} operation when + * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}. + *

+ *

+ * The default value for this setting is {@code 1000}. + *

+ */ + public int getPreExpandValueSetsMaxCountExperimental() { + return myPreExpandValueSetsMaxCountExperimental; + } + + /** + * EXPERIMENTAL - Do not use in production! + *

+ * This is the max value of {@code count} parameter for the ValueSet {@code $expand} operation when + * {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}. + *

+ *

+ * If {@code thePreExpandValueSetsMaxCountExperimental} is lesser than + * {@link DaoConfig#getPreExpandValueSetsDefaultCountExperimental()}, the default {@code count} is lowered to the + * new max {@code count}. + *

+ *

+ * The default value for this setting is {@code 1000}. + *

+ */ + public void setPreExpandValueSetsMaxCountExperimental(int thePreExpandValueSetsMaxCountExperimental) { + myPreExpandValueSetsMaxCountExperimental = thePreExpandValueSetsMaxCountExperimental; + setPreExpandValueSetsDefaultCountExperimental(Math.min(getPreExpandValueSetsDefaultCountExperimental(), getPreExpandValueSetsMaxCountExperimental())); + } + public enum IndexEnabledEnum { ENABLED, DISABLED diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java index 45148a1d9d5..1db5197f8f7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoValueSetDstu2.java @@ -20,15 +20,25 @@ package ca.uhn.fhir.jpa.dao; * #L% */ -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.util.*; - -import javax.annotation.PostConstruct; - +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.IContextValidationSupport; +import ca.uhn.fhir.jpa.model.entity.BaseHasResource; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt; +import ca.uhn.fhir.model.dstu2.composite.CodingDt; +import ca.uhn.fhir.model.dstu2.resource.ValueSet; +import ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystemConcept; +import ca.uhn.fhir.model.dstu2.resource.ValueSet.ComposeInclude; +import ca.uhn.fhir.model.dstu2.resource.ValueSet.ComposeIncludeConcept; +import ca.uhn.fhir.model.dstu2.resource.ValueSet.ExpansionContains; +import ca.uhn.fhir.model.primitive.DateTimeDt; +import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.param.TokenParam; +import 
ca.uhn.fhir.rest.param.UriParam; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import org.apache.commons.codec.binary.StringUtils; import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport; import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport; @@ -38,20 +48,14 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.model.entity.BaseHasResource; -import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt; -import ca.uhn.fhir.model.dstu2.composite.CodingDt; -import ca.uhn.fhir.model.dstu2.resource.ValueSet; -import ca.uhn.fhir.model.dstu2.resource.ValueSet.*; -import ca.uhn.fhir.model.primitive.DateTimeDt; -import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.rest.api.server.IBundleProvider; -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.param.TokenParam; -import ca.uhn.fhir.rest.param.UriParam; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import javax.annotation.PostConstruct; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2 implements IFhirResourceDaoValueSet, IFhirResourceDaoCodeSystem { @@ -95,7 +99,11 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2 public ValueSet expand(IIdType theId, String theFilter, RequestDetails theRequest) { ValueSet source = loadValueSetForExpansion(theId, theRequest); return expand(source, theFilter); + } + 
@Override + public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequest) { + throw new UnsupportedOperationException(); } @Override @@ -131,6 +139,11 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2 return retVal; } + @Override + public ValueSet expand(ValueSet source, String theFilter, int theOffset, int theCount) { + throw new UnsupportedOperationException(); + } + @Override public ValueSet expandByIdentifier(String theUri, String theFilter) { if (isBlank(theUri)) { @@ -153,7 +166,11 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2 } return expand(source, theFilter); + } + @Override + public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { + throw new UnsupportedOperationException(); } @Override diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java index 1659463242f..80aff0ed58c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java @@ -27,10 +27,16 @@ public interface IFhirResourceDaoValueSet exten T expand(IIdType theId, String theFilter, RequestDetails theRequestDetails); + T expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails); + T expand(T theSource, String theFilter); + T expand(T theSource, String theFilter, int theOffset, int theCount); + T expandByIdentifier(String theUri, String theFilter); + T expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount); + void purgeCaches(); ValidateCodeResult validateCode(IPrimitiveType theValueSetIdentifier, IIdType theId, IPrimitiveType theCode, IPrimitiveType theSystem, IPrimitiveType theDisplay, CD theCoding, CC 
theCodeableConcept, RequestDetails theRequestDetails); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java index 282b1ab5a19..2b7ca96137c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java @@ -75,6 +75,12 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 return expand(source, theFilter); } + @Override + public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) { + ValueSet source = read(theId, theRequestDetails); + return expand(source, theFilter, theOffset, theCount); + } + private ValueSet doExpand(ValueSet theSource) { validateIncludes("include", theSource.getCompose().getInclude()); @@ -105,7 +111,38 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 ValueSet retVal = outcome.getValueset(); retVal.setStatus(PublicationStatus.ACTIVE); return retVal; + } + private ValueSet doExpand(ValueSet theSource, int theOffset, int theCount) { + + validateIncludes("include", theSource.getCompose().getInclude()); + validateIncludes("exclude", theSource.getCompose().getExclude()); + + /* + * If all of the code systems are supported by the HAPI FHIR terminology service, let's + * use that as it's more efficient. 
+ */ + + boolean allSystemsAreSuppportedByTerminologyService = true; + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { + if (!myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + for (ConceptSetComponent next : theSource.getCompose().getExclude()) { + if (!myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + if (allSystemsAreSuppportedByTerminologyService) { + return (ValueSet) myTerminologySvc.expandValueSet(theSource, theOffset, theCount); + } + + HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport); + ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null); + ValueSet retVal = outcome.getValueset(); + retVal.setStatus(PublicationStatus.ACTIVE); + return retVal; } private void validateIncludes(String name, List listToValidate) { @@ -148,7 +185,28 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 // } // // return expand(defaultValueSet, theFilter); + } + @Override + public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { + if (isBlank(theUri)) { + throw new InvalidRequestException("URI must not be blank or missing"); + } + + ValueSet source = new ValueSet(); + + source.getCompose().addInclude().addValueSet(theUri); + + if (isNotBlank(theFilter)) { + ConceptSetComponent include = source.getCompose().addInclude(); + ConceptSetFilterComponent filter = include.addFilter(); + filter.setProperty("display"); + filter.setOp(FilterOperator.EQUAL); + filter.setValue(theFilter); + } + + ValueSet retVal = doExpand(source, theOffset, theCount); + return retVal; } @Override @@ -179,7 +237,30 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } return retVal; + } + @Override + public ValueSet expand(ValueSet source, String theFilter, int theOffset, int theCount) { + ValueSet toExpand = 
new ValueSet(); + + for (ConceptSetComponent next : source.getCompose().getInclude()) { + toExpand.getCompose().addInclude(next); + addFilterIfPresent(theFilter, next); + } + + if (toExpand.getCompose().isEmpty()) { + throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); + } + + toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + + ValueSet retVal = doExpand(toExpand, theOffset, theCount); + + if (isNotBlank(theFilter)) { + applyFilter(retVal.getExpansion().getTotalElement(), retVal.getExpansion().getContains(), theFilter); + } + + return retVal; } private void applyFilter(IntegerType theTotalElement, List theContains, String theFilter) { @@ -246,9 +327,8 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } if (vs != null) { - ValueSet expansion = doExpand(vs); + ValueSet expansion = doExpand(vs); // FIXME: DM 2019-08-17 - Need to account for concepts in terminology tables. 
List contains = expansion.getExpansion().getContains(); - ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); if (result != null) { if (theDisplay != null && isNotBlank(theDisplay.getValue()) && isNotBlank(result.getDisplay())) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java index 0663d8cd3f7..4deb2c12a19 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java @@ -70,6 +70,12 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple return expand(source, theFilter); } + @Override + public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) { + ValueSet source = read(theId, theRequestDetails); + return expand(source, theFilter, theOffset, theCount); + } + private ValueSet doExpand(ValueSet theSource) { /* @@ -109,6 +115,32 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple // return retVal; } + private ValueSet doExpand(ValueSet theSource, int theOffset, int theCount) { + boolean allSystemsAreSuppportedByTerminologyService = true; + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { + if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + for (ConceptSetComponent next : theSource.getCompose().getExclude()) { + if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + if (allSystemsAreSuppportedByTerminologyService) { + return myTerminologySvc.expandValueSet(theSource, 
theOffset, theCount); + } + + HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport); + + ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null); + + ValueSet retVal = outcome.getValueset(); + retVal.setStatus(PublicationStatus.ACTIVE); + + return retVal; + } + private void validateIncludes(String name, List listToValidate) { for (ConceptSetComponent nextExclude : listToValidate) { if (isBlank(nextExclude.getSystem()) && !ElementUtil.isEmpty(nextExclude.getConcept(), nextExclude.getFilter())) { @@ -149,7 +181,28 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple // } // // return expand(defaultValueSet, theFilter); + } + @Override + public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { + if (isBlank(theUri)) { + throw new InvalidRequestException("URI must not be blank or missing"); + } + + ValueSet source = new ValueSet(); + + source.getCompose().addInclude().addValueSet(theUri); + + if (isNotBlank(theFilter)) { + ConceptSetComponent include = source.getCompose().addInclude(); + ConceptSetFilterComponent filter = include.addFilter(); + filter.setProperty("display"); + filter.setOp(FilterOperator.EQUAL); + filter.setValue(theFilter); + } + + ValueSet retVal = doExpand(source, theOffset, theCount); + return retVal; } @Override @@ -180,7 +233,30 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple } return retVal; + } + @Override + public ValueSet expand(ValueSet source, String theFilter, int theOffset, int theCount) { + ValueSet toExpand = new ValueSet(); + + for (ConceptSetComponent next : source.getCompose().getInclude()) { + toExpand.getCompose().addInclude(next); + addFilterIfPresent(theFilter, next); + } + + if (toExpand.getCompose().isEmpty()) { + throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); + } + + 
toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + + ValueSet retVal = doExpand(toExpand, theOffset, theCount); + + if (isNotBlank(theFilter)) { + applyFilter(retVal.getExpansion().getTotalElement(), retVal.getExpansion().getContains(), theFilter); + } + + return retVal; } private void applyFilter(IntegerType theTotalElement, List theContains, String theFilter) { @@ -247,7 +323,7 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple } if (vs != null) { - ValueSet expansion = doExpand(vs); + ValueSet expansion = doExpand(vs); // FIXME: DM 2019-08-17 - Need to account for concepts in terminology tables. List contains = expansion.getExpansion().getContains(); ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); if (result != null) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java index 12be4a64a15..b7dd6796be1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java @@ -70,6 +70,12 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple return expand(source, theFilter); } + @Override + public ValueSet expand(IIdType theId, String theFilter, int theOffset, int theCount, RequestDetails theRequestDetails) { + ValueSet source = read(theId, theRequestDetails); + return expand(source, theFilter, theOffset, theCount); + } + private ValueSet doExpand(ValueSet theSource) { /* @@ -109,6 +115,38 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple // return retVal; } + private ValueSet doExpand(ValueSet theSource, int theOffset, int theCount) { + + /* + * If all of the code systems are supported by the 
HAPI FHIR terminology service, let's + * use that as it's more efficient. + */ + + boolean allSystemsAreSuppportedByTerminologyService = true; + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { + if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + for (ConceptSetComponent next : theSource.getCompose().getExclude()) { + if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) { + allSystemsAreSuppportedByTerminologyService = false; + } + } + if (allSystemsAreSuppportedByTerminologyService) { + return (ValueSet) myTerminologySvc.expandValueSet(theSource, theOffset, theCount); + } + + HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport); + + ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null); + + ValueSet retVal = outcome.getValueset(); + retVal.setStatus(PublicationStatus.ACTIVE); + + return retVal; + } + private void validateIncludes(String name, List listToValidate) { for (ConceptSetComponent nextExclude : listToValidate) { if (isBlank(nextExclude.getSystem()) && !ElementUtil.isEmpty(nextExclude.getConcept(), nextExclude.getFilter())) { @@ -149,7 +187,28 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple // } // // return expand(defaultValueSet, theFilter); + } + @Override + public ValueSet expandByIdentifier(String theUri, String theFilter, int theOffset, int theCount) { + if (isBlank(theUri)) { + throw new InvalidRequestException("URI must not be blank or missing"); + } + + ValueSet source = new ValueSet(); + + source.getCompose().addInclude().addValueSet(theUri); + + if (isNotBlank(theFilter)) { + ConceptSetComponent include = source.getCompose().addInclude(); + ConceptSetFilterComponent filter = include.addFilter(); + filter.setProperty("display"); + filter.setOp(FilterOperator.EQUAL); + filter.setValue(theFilter); + } + + 
ValueSet retVal = doExpand(source, theOffset, theCount); + return retVal; } @Override @@ -180,7 +239,30 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple } return retVal; + } + @Override + public ValueSet expand(ValueSet source, String theFilter, int theOffset, int theCount) { + ValueSet toExpand = new ValueSet(); + + for (ConceptSetComponent next : source.getCompose().getInclude()) { + toExpand.getCompose().addInclude(next); + addFilterIfPresent(theFilter, next); + } + + if (toExpand.getCompose().isEmpty()) { + throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); + } + + toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + + ValueSet retVal = doExpand(toExpand, theOffset, theCount); + + if (isNotBlank(theFilter)) { + applyFilter(retVal.getExpansion().getTotalElement(), retVal.getExpansion().getContains(), theFilter); + } + + return retVal; } private void applyFilter(IntegerType theTotalElement, List theContains, String theFilter) { @@ -247,7 +329,7 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple } if (vs != null) { - ValueSet expansion = doExpand(vs); + ValueSet expansion = doExpand(vs); // FIXME: DM 2019-08-17 - Need to account for concepts in terminology tables. 
List contains = expansion.getExpansion().getContains(); ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); if (result != null) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java index e0ce7b36245..464adce1f57 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java @@ -1,9 +1,10 @@ package ca.uhn.fhir.jpa.provider; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.dao.DaoConfig; +import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.util.ExpungeOptions; import ca.uhn.fhir.jpa.util.ExpungeOutcome; -import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.param.DateRangeParam; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; @@ -12,6 +13,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.IntegerType; import org.hl7.fhir.r4.model.Parameters; import org.jboss.logging.MDC; +import org.springframework.beans.factory.annotation.Autowired; import javax.servlet.http.HttpServletRequest; import java.util.Date; @@ -42,6 +44,10 @@ import java.util.TreeSet; public class BaseJpaProvider { public static final String REMOTE_ADDR = "req.remoteAddr"; public static final String REMOTE_UA = "req.userAgent"; + + @Autowired + protected DaoConfig myDaoConfig; + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaProvider.class); private FhirContext myContext; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java index e55de8b4a51..884941f053c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderValueSetDstu3.java @@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDstu3 { + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetDstu3.class); @Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true) public ValueSet expand( @@ -46,6 +47,8 @@ public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDst @OperationParam(name = "url", min = 0, max = 1) UriType theUrl, @OperationParam(name = "identifier", min = 0, max = 1) UriType theIdentifier, @OperationParam(name = "filter", min = 0, max = 1) StringType theFilter, + @OperationParam(name = "offset", min = 0, max = 1) IntegerType theOffset, + @OperationParam(name = "count", min = 0, max = 1) IntegerType theCount, RequestDetails theRequestDetails) { boolean haveId = theId != null && theId.hasIdPart(); @@ -55,27 +58,59 @@ public class BaseJpaResourceProviderValueSetDstu3 extends JpaResourceProviderDst } boolean haveIdentifier = url != null && isNotBlank(url.getValue()); - boolean haveValueSet = theValueSet != null && theValueSet.isEmpty() == false; + boolean haveValueSet = theValueSet != null && !theValueSet.isEmpty(); if (!haveId && !haveIdentifier && !haveValueSet) { - throw new InvalidRequestException("$expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request"); + throw new InvalidRequestException("$expand operation at the type level (no ID specified) 
requires an identifier or a valueSet as a part of the request."); } if (moreThanOneTrue(haveId, haveIdentifier, haveValueSet)) { throw new InvalidRequestException("$expand must EITHER be invoked at the instance level, or have an identifier specified, or have a ValueSet specified. Can not combine these options."); } + int offset = myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(); + if (theOffset != null && theOffset.hasValue()) { + if (theOffset.getValue() >= 0) { + offset = theOffset.getValue(); + } else { + throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue()); + } + } + + int count = myDaoConfig.getPreExpandValueSetsDefaultCountExperimental(); + if (theCount != null && theCount.hasValue()) { + if (theCount.getValue() >= 0) { + count = theCount.getValue(); + } else { + throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. count: " + theCount.getValue()); + } + } + int countMax = myDaoConfig.getPreExpandValueSetsMaxCountExperimental(); + if (count > countMax) { + ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); + count = countMax; + } + startRequest(theServletRequest); try { IFhirResourceDaoValueSet dao = (IFhirResourceDaoValueSet) getDao(); - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), theRequestDetails); - } else if (haveIdentifier) { - return dao.expandByIdentifier(url.getValue(), toFilterString(theFilter)); + if (myDaoConfig.isPreExpandValueSetsExperimental()) { + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(url.getValue(), toFilterString(theFilter), offset, count); + } else { + return dao.expand(theValueSet, toFilterString(theFilter), offset, count); + } } else { - return dao.expand(theValueSet, 
toFilterString(theFilter)); + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(url.getValue(), toFilterString(theFilter)); + } else { + return dao.expand(theValueSet, toFilterString(theFilter)); + } } - } finally { endRequest(theServletRequest); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderConceptMapR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderConceptMapR4.java index e13582a995e..544670c2a5b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderConceptMapR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseJpaResourceProviderConceptMapR4.java @@ -62,7 +62,7 @@ public class BaseJpaResourceProviderConceptMapR4 extends JpaResourceProviderR4 { + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetR4.class); @Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true) public ValueSet expand( @@ -43,31 +44,65 @@ public class BaseJpaResourceProviderValueSetR4 extends JpaResourceProviderR4= 0) { + offset = theOffset.getValue(); + } else { + throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue()); + } + } + + int count = myDaoConfig.getPreExpandValueSetsDefaultCountExperimental(); + if (theCount != null && theCount.hasValue()) { + if (theCount.getValue() >= 0) { + count = theCount.getValue(); + } else { + throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. 
count: " + theCount.getValue()); + } + } + int countMax = myDaoConfig.getPreExpandValueSetsMaxCountExperimental(); + if (count > countMax) { + ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); + count = countMax; + } + startRequest(theServletRequest); try { IFhirResourceDaoValueSet dao = (IFhirResourceDaoValueSet) getDao(); - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), theRequestDetails); - } else if (haveIdentifier) { - return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter)); + if (myDaoConfig.isPreExpandValueSetsExperimental()) { + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter), offset, count); + } else { + return dao.expand(theValueSet, toFilterString(theFilter), offset, count); + } } else { - return dao.expand(theValueSet, toFilterString(theFilter)); + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter)); + } else { + return dao.expand(theValueSet, toFilterString(theFilter)); + } } - } finally { endRequest(theServletRequest); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java index 65522e01231..f05a8f4cdaa 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/BaseJpaResourceProviderValueSetR5.java @@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.isNotBlank; public 
class BaseJpaResourceProviderValueSetR5 extends JpaResourceProviderR5 { + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaResourceProviderValueSetR5.class); @Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true) public ValueSet expand( @@ -43,31 +44,65 @@ public class BaseJpaResourceProviderValueSetR5 extends JpaResourceProviderR5= 0) { + offset = theOffset.getValue(); + } else { + throw new InvalidRequestException("offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue()); + } + } + + int count = myDaoConfig.getPreExpandValueSetsDefaultCountExperimental(); + if (theCount != null && theCount.hasValue()) { + if (theCount.getValue() >= 0) { + count = theCount.getValue(); + } else { + throw new InvalidRequestException("count parameter for $expand operation must be >= 0 when specified. count: " + theCount.getValue()); + } + } + int countMax = myDaoConfig.getPreExpandValueSetsMaxCountExperimental(); + if (count > countMax) { + ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); + count = countMax; + } + startRequest(theServletRequest); try { IFhirResourceDaoValueSet dao = (IFhirResourceDaoValueSet) getDao(); - if (haveId) { - return dao.expand(theId, toFilterString(theFilter), theRequestDetails); - } else if (haveIdentifier) { - return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter)); + if (myDaoConfig.isPreExpandValueSetsExperimental()) { + if (haveId) { + return dao.expand(theId, toFilterString(theFilter), offset, count, theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter), offset, count); + } else { + return dao.expand(theValueSet, toFilterString(theFilter), offset, count); + } } else { - return dao.expand(theValueSet, toFilterString(theFilter)); + if (haveId) { + return dao.expand(theId, 
toFilterString(theFilter), theRequestDetails); + } else if (haveIdentifier) { + return dao.expandByIdentifier(theUrl.getValue(), toFilterString(theFilter)); + } else { + return dao.expand(theValueSet, toFilterString(theFilter)); + } } - } finally { endRequest(theServletRequest); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java index c574f84a122..67f16c5cba7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java @@ -478,6 +478,65 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, return valueSet; } + @Override + @Transactional(propagation = Propagation.REQUIRED) + public ValueSet expandValueSet(ValueSet theValueSetToExpand, int theOffset, int theCount) { + ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSetToExpand, "ValueSet to expand can not be null"); + ValidateUtil.isTrueOrThrowInvalidRequest(theValueSetToExpand.hasUrl(), "ValueSet to be expanded must provide ValueSet.url", theValueSetToExpand); + ValidateUtil.isNotBlankOrThrowUnprocessableEntity(theValueSetToExpand.getUrl(), theValueSetToExpand.getIdElement().toUnqualifiedVersionless().getValue() + " to be expanded must provide ValueSet.url"); + + Optional optionalTermValueSet = myValueSetDao.findByUrl(theValueSetToExpand.getUrl()); + if (!optionalTermValueSet.isPresent()) { + throw new InvalidRequestException("ValueSet is not present in terminology tables: " + theValueSetToExpand.getUrl()); + } + + TermValueSet termValueSet = optionalTermValueSet.get(); + + if (TermValueSetExpansionStatusEnum.EXPANDED != termValueSet.getExpansionStatus()) { + throw new UnprocessableEntityException("ValueSet is not ready for expansion; current status: " + termValueSet.getExpansionStatus()); + 
} + + ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent(); + expansionComponent.setIdentifier(UUID.randomUUID().toString()); + expansionComponent.setTimestamp(new Date()); + + int numberOfConcepts = termValueSet.getConcepts().size(); + expansionComponent.setTotal(numberOfConcepts); + expansionComponent.setOffset(theOffset); + + List subListOfConcepts = new ArrayList<>(); + if (theCount != 0 && numberOfConcepts != 0) { + int toIndex = Math.min(theOffset + theCount, numberOfConcepts); + subListOfConcepts = termValueSet.getConcepts().subList(theOffset, toIndex); + } + + for (TermValueSetConcept concept : subListOfConcepts) { + ValueSet.ValueSetExpansionContainsComponent containsComponent = expansionComponent.addContains(); + containsComponent.setSystem(concept.getSystem()); + containsComponent.setCode(concept.getCode()); + containsComponent.setDisplay(concept.getDisplay()); + + // TODO: DM 2019-08-17 - Implement includeDesignations parameter for $expand operation. 
+ for (TermValueSetConceptDesignation designation : concept.getDesignations()) { + ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.addDesignation(); + designationComponent.setLanguage(designation.getLanguage()); + if (isNoneBlank(designation.getUseSystem(), designation.getUseCode())) { + designationComponent.setUse(new Coding( + designation.getUseSystem(), + designation.getUseCode(), + designation.getUseDisplay())); + } + designationComponent.setValue(designation.getValue()); + } + } + + ValueSet valueSet = new ValueSet(); + valueSet.setStatus(Enumerations.PublicationStatus.ACTIVE); + valueSet.setCompose(theValueSetToExpand.getCompose()); + valueSet.setExpansion(expansionComponent); + return valueSet; + } + @Override @Transactional(propagation = Propagation.REQUIRED) public void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { @@ -496,9 +555,9 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, // Handle excludes ourLog.debug("Handling excludes"); - for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getExclude()) { + for (ValueSet.ConceptSetComponent exclude : theValueSetToExpand.getCompose().getExclude()) { boolean add = false; - expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter); + expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, exclude, add, theCodeCounter); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java index 498f25dfe94..a64bb9baccf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java @@ -92,6 +92,11 @@ public class HapiTerminologySvcDstu2 
extends BaseHapiTerminologySvcImpl { throw new UnsupportedOperationException(); } + @Override + public IBaseResource expandValueSet(IBaseResource theValueSetToExpand, int theOffset, int theCount) { + throw new UnsupportedOperationException(); + } + @Override public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { throw new UnsupportedOperationException(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java index da83782a810..4866ddd45e0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java @@ -177,6 +177,20 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implemen } } + @Override + public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) { + ValueSet valueSetToExpand = (ValueSet) theInput; + + try { + org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4; + valueSetToExpandR4 = VersionConvertor_30_40.convertValueSet(valueSetToExpand); + org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSet(valueSetToExpandR4, theOffset, theCount); + return VersionConvertor_30_40.convertValueSet(expandedR4); + } catch (FHIRException e) { + throw new InternalErrorException(e); + } + } + @Override public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { ValueSet valueSetToExpand = (ValueSet) theValueSetToExpand; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java index b764fcf5ee8..6ded7d0a469 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java @@ -137,6 +137,12 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements return super.expandValueSet(valueSetToExpand); } + @Override + public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) { + ValueSet valueSetToExpand = (ValueSet) theInput; + return super.expandValueSet(valueSetToExpand, theOffset, theCount); + } + @Override public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { ValueSet valueSetToExpand = (ValueSet) theValueSetToExpand; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java index 2f27d7b1761..c90099016f0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java @@ -143,6 +143,13 @@ public class HapiTerminologySvcR5 extends BaseHapiTerminologySvcImpl implements return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR4); } + @Override + public IBaseResource expandValueSet(IBaseResource theInput, int theOffset, int theCount) { + org.hl7.fhir.r4.model.ValueSet valueSetToExpand = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet((ValueSet) theInput); + org.hl7.fhir.r4.model.ValueSet valueSetR4 = super.expandValueSet(valueSetToExpand, theOffset, theCount); + return org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(valueSetR4); + } + @Override public void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { org.hl7.fhir.r4.model.ValueSet valueSetToExpand = 
org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet((ValueSet) theValueSetToExpand); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java index 5faf0f72320..195a4192c3a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java @@ -44,6 +44,8 @@ public interface IHapiTerminologySvc { ValueSet expandValueSet(ValueSet theValueSetToExpand); + ValueSet expandValueSet(ValueSet theValueSetToExpand, int theOffset, int theCount); + void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator); /** @@ -51,6 +53,11 @@ public interface IHapiTerminologySvc { */ IBaseResource expandValueSet(IBaseResource theValueSetToExpand); + /** + * Version independent + */ + IBaseResource expandValueSet(IBaseResource theValueSetToExpand, int theOffset, int theCount); + void expandValueSet(IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator); List expandValueSet(String theValueSet); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java index af7fcd14fac..c789f0a82b9 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java @@ -397,7 +397,6 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3 @Test public void testExpandInvalidParams() throws IOException { - //@formatter:off try { ourClient .operation() @@ -407,11 +406,9 @@ public class 
ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3 .execute(); fail(); } catch (InvalidRequestException e) { - assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request", e.getMessage()); + assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request.", e.getMessage()); } - //@formatter:on - //@formatter:off try { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-dstu3.xml"); ourClient @@ -425,9 +422,7 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3 } catch (InvalidRequestException e) { assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have an identifier specified, or have a ValueSet specified. Can not combine these options.", e.getMessage()); } - //@formatter:on - //@formatter:off try { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-dstu3.xml"); ourClient @@ -441,8 +436,30 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3 } catch (InvalidRequestException e) { assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have an identifier specified, or have a ValueSet specified. Can not combine these options.", e.getMessage()); } - //@formatter:on + try { + ourClient + .operation() + .onInstance(myExtensionalVsId) + .named("expand") + .withParameter(Parameters.class, "offset", new IntegerType(-1)) + .execute(); + fail(); + } catch (InvalidRequestException e) { + assertEquals("HTTP 400 Bad Request: offset parameter for $expand operation must be >= 0 when specified. 
offset: -1", e.getMessage()); + } + + try { + ourClient + .operation() + .onInstance(myExtensionalVsId) + .named("expand") + .withParameter(Parameters.class, "count", new IntegerType(-1)) + .execute(); + fail(); + } catch (InvalidRequestException e) { + assertEquals("HTTP 400 Bad Request: count parameter for $expand operation must be >= 0 when specified. count: -1", e.getMessage()); + } } @Test diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetTest.java index a6e1011f511..7a69fd83b55 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetTest.java @@ -264,7 +264,6 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test { @Test public void testExpandInvalidParams() throws IOException { - //@formatter:off try { ourClient .operation() @@ -274,11 +273,9 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test { .execute(); fail(); } catch (InvalidRequestException e) { - assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request", e.getMessage()); + assertEquals("HTTP 400 Bad Request: $expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request.", e.getMessage()); } - //@formatter:on - //@formatter:off try { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/r4/extensional-case-r4.xml"); ourClient @@ -292,9 +289,7 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test { } catch (InvalidRequestException e) { assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have a url specified, or have a 
ValueSet specified. Can not combine these options.", e.getMessage()); } - //@formatter:on - //@formatter:off try { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/r4/extensional-case.xml"); ourClient @@ -308,8 +303,30 @@ public class ResourceProviderR4ValueSetTest extends BaseResourceProviderR4Test { } catch (InvalidRequestException e) { assertEquals("HTTP 400 Bad Request: $expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. Can not combine these options.", e.getMessage()); } - //@formatter:on + try { + ourClient + .operation() + .onInstance(myExtensionalVsId) + .named("expand") + .withParameter(Parameters.class, "offset", new IntegerType(-1)) + .execute(); + fail(); + } catch (InvalidRequestException e) { + assertEquals("HTTP 400 Bad Request: offset parameter for $expand operation must be >= 0 when specified. offset: -1", e.getMessage()); + } + + try { + ourClient + .operation() + .onInstance(myExtensionalVsId) + .named("expand") + .withParameter(Parameters.class, "count", new IntegerType(-1)) + .execute(); + fail(); + } catch (InvalidRequestException e) { + assertEquals("HTTP 400 Bad Request: count parameter for $expand operation must be >= 0 when specified. 
count: -1", e.getMessage()); + } } @Test diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index fef85b39cc8..e7b16145091 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -595,6 +595,264 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { loadAndPersistValueSet(); } + @Test + public void testExpandTermValueSetAndChildren() throws Exception { + myDaoConfig.setPreExpandValueSetsExperimental(true); + + loadAndPersistCodeSystemAndValueSetWithDesignations(); + + CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId); + ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem)); + + ValueSet valueSet = myValueSetDao.read(myExtensionalVsId); + ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); + + myTermSvc.preExpandValueSetToTerminologyTables(); + + ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), myDaoConfig.getPreExpandValueSetsDefaultCountExperimental()); + ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); + + assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); + assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset()); + assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getContains().size()); + + ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); + assertEquals("http://acme.org", 
containsComponent.getSystem()); + assertEquals("8450-9", containsComponent.getCode()); + assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay()); + assertEquals(2, containsComponent.getDesignation().size()); + + ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); + assertEquals("nl", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue()); + + designationComponent = containsComponent.getDesignation().get(1); + assertEquals("sv", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue()); + + containsComponent = expandedValueSet.getExpansion().getContains().get(1); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("11378-7", containsComponent.getCode()); + assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); + assertFalse(containsComponent.hasDesignation()); + + // ... 
+ + containsComponent = expandedValueSet.getExpansion().getContains().get(22); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8491-3", containsComponent.getCode()); + assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); + assertEquals(1, containsComponent.getDesignation().size()); + + designationComponent = containsComponent.getDesignation().get(0); + assertEquals("nl", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); + + containsComponent = expandedValueSet.getExpansion().getContains().get(23); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8492-1", containsComponent.getCode()); + assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay()); + assertFalse(containsComponent.hasDesignation()); + } + + @Test + public void testExpandTermValueSetAndChildrenWithCount() throws Exception { + myDaoConfig.setPreExpandValueSetsExperimental(true); + + loadAndPersistCodeSystemAndValueSetWithDesignations(); + + CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId); + ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem)); + + ValueSet valueSet = myValueSetDao.read(myExtensionalVsId); + ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); + + myTermSvc.preExpandValueSetToTerminologyTables(); + + ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), 23); + ourLog.info("Expanded ValueSet:\n" + 
myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); + + assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); + assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset()); + assertEquals(23, expandedValueSet.getExpansion().getContains().size()); + + ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8450-9", containsComponent.getCode()); + assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay()); + assertEquals(2, containsComponent.getDesignation().size()); + + ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); + assertEquals("nl", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue()); + + designationComponent = containsComponent.getDesignation().get(1); + assertEquals("sv", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue()); + + containsComponent = expandedValueSet.getExpansion().getContains().get(1); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("11378-7", containsComponent.getCode()); + assertEquals("Systolic blood pressure at First encounter", 
containsComponent.getDisplay()); + assertFalse(containsComponent.hasDesignation()); + + // ... + + containsComponent = expandedValueSet.getExpansion().getContains().get(22); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8491-3", containsComponent.getCode()); + assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); + assertEquals(1, containsComponent.getDesignation().size()); + + designationComponent = containsComponent.getDesignation().get(0); + assertEquals("nl", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); + } + + @Test + public void testExpandTermValueSetAndChildrenWithCountOfZero() throws Exception { + myDaoConfig.setPreExpandValueSetsExperimental(true); + + loadAndPersistCodeSystemAndValueSetWithDesignations(); + + CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId); + ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem)); + + ValueSet valueSet = myValueSetDao.read(myExtensionalVsId); + ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); + + myTermSvc.preExpandValueSetToTerminologyTables(); + + ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), 0); + ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); + + assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); + assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), 
expandedValueSet.getExpansion().getOffset()); + assertFalse(expandedValueSet.getExpansion().hasContains()); + } + + @Test + public void testExpandTermValueSetAndChildrenWithOffset() throws Exception { + myDaoConfig.setPreExpandValueSetsExperimental(true); + + loadAndPersistCodeSystemAndValueSetWithDesignations(); + + CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId); + ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem)); + + ValueSet valueSet = myValueSetDao.read(myExtensionalVsId); + ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); + + myTermSvc.preExpandValueSetToTerminologyTables(); + + ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, 1, myDaoConfig.getPreExpandValueSetsDefaultCountExperimental()); + ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); + + assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); + assertEquals(1, expandedValueSet.getExpansion().getOffset()); + assertEquals(codeSystem.getConcept().size() - expandedValueSet.getExpansion().getOffset(), expandedValueSet.getExpansion().getContains().size()); + + ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("11378-7", containsComponent.getCode()); + assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); + assertFalse(containsComponent.hasDesignation()); + + containsComponent = expandedValueSet.getExpansion().getContains().get(1); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8493-9", containsComponent.getCode()); + assertEquals("Systolic blood pressure 10 hour minimum", containsComponent.getDisplay()); + 
assertFalse(containsComponent.hasDesignation()); + + // ... + + containsComponent = expandedValueSet.getExpansion().getContains().get(21); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8491-3", containsComponent.getCode()); + assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); + assertEquals(1, containsComponent.getDesignation().size()); + + ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); + assertEquals("nl", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); + + containsComponent = expandedValueSet.getExpansion().getContains().get(22); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8492-1", containsComponent.getCode()); + assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay()); + assertFalse(containsComponent.hasDesignation()); + } + + @Test + public void testExpandTermValueSetAndChildrenWithOffsetAndCount() throws Exception { + myDaoConfig.setPreExpandValueSetsExperimental(true); + + loadAndPersistCodeSystemAndValueSetWithDesignations(); + + CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId); + ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem)); + + ValueSet valueSet = myValueSetDao.read(myExtensionalVsId); + ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); + + myTermSvc.preExpandValueSetToTerminologyTables(); + + ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, 1, 22); + ourLog.info("Expanded ValueSet:\n" + 
myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); + + assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); + assertEquals(1, expandedValueSet.getExpansion().getOffset()); + assertEquals(22, expandedValueSet.getExpansion().getContains().size()); + + ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("11378-7", containsComponent.getCode()); + assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); + assertFalse(containsComponent.hasDesignation()); + + containsComponent = expandedValueSet.getExpansion().getContains().get(1); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8493-9", containsComponent.getCode()); + assertEquals("Systolic blood pressure 10 hour minimum", containsComponent.getDisplay()); + assertFalse(containsComponent.hasDesignation()); + + // ... 
+ + containsComponent = expandedValueSet.getExpansion().getContains().get(21); + assertEquals("http://acme.org", containsComponent.getSystem()); + assertEquals("8491-3", containsComponent.getCode()); + assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); + assertEquals(1, containsComponent.getDesignation().size()); + + ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); + assertEquals("nl", designationComponent.getLanguage()); + assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); + assertEquals("900000000000013009", designationComponent.getUse().getCode()); + assertEquals("Synonym", designationComponent.getUse().getDisplay()); + assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); + } + @Test public void testExpandValueSetWithValueSetCodeAccumulator() { createCodeSystem(); From 53364a3af111aa5787f48ed77b2f267d6a4cc895 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Mon, 19 Aug 2019 02:50:01 -0400 Subject: [PATCH 02/23] Downgraded FIXMEs to TODOs regarding -code for large ValueSets. 
--- .../uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java | 4 ++-- .../ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java | 2 +- .../ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java index 2b7ca96137c..ad30a841f1e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java @@ -327,7 +327,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } if (vs != null) { - ValueSet expansion = doExpand(vs); // FIXME: DM 2019-08-17 - Need to account for concepts in terminology tables. + ValueSet expansion = doExpand(vs); // TODO: DM 2019-08-17 - Need to account for concepts in terminology tables. 
See #1431 List contains = expansion.getExpansion().getContains(); ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); if (result != null) { @@ -349,7 +349,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List contains, String theSystem, String theCode, - Coding theCoding, CodeableConcept theCodeableConcept) { + Coding theCoding, CodeableConcept theCodeableConcept) { for (ValueSetExpansionContainsComponent nextCode : contains) { ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept); if (result != null) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java index 4deb2c12a19..c0b10552dfc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java @@ -323,7 +323,7 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple } if (vs != null) { - ValueSet expansion = doExpand(vs); // FIXME: DM 2019-08-17 - Need to account for concepts in terminology tables. + ValueSet expansion = doExpand(vs); // TODO: DM 2019-08-17 - Need to account for concepts in terminology tables. 
See #1431 List contains = expansion.getExpansion().getContains(); ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); if (result != null) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java index b7dd6796be1..b1f32510b1e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java @@ -329,7 +329,7 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple } if (vs != null) { - ValueSet expansion = doExpand(vs); // FIXME: DM 2019-08-17 - Need to account for concepts in terminology tables. + ValueSet expansion = doExpand(vs); // TODO: DM 2019-08-17 - Need to account for concepts in terminology tables. See #1431 List contains = expansion.getExpansion().getContains(); ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); if (result != null) { From 630bb916b452025c31976ff6ca8852d53b8e1e7b Mon Sep 17 00:00:00 2001 From: Ken Stevens Date: Mon, 19 Aug 2019 12:15:14 -0400 Subject: [PATCH 03/23] awaitility version bump --- pom.xml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index ea8ef24303c..e867b4eec09 100755 --- a/pom.xml +++ b/pom.xml @@ -1062,7 +1062,8 @@ org.awaitility awaitility - 3.1.6 + 4.0.0-rc1 + org.codehaus.plexus From 70aef7fbbb40db31bd3039c30bd9aad1093ad17c Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Mon, 19 Aug 2019 18:43:43 -0400 Subject: [PATCH 04/23] Incremental work on large ValueSet expansion support; need pageable results while querying TRM_VALUESET_CONCEPT. 
--- .../java/ca/uhn/fhir/jpa/dao/DaoConfig.java | 2 +- .../dstu3/FhirResourceDaoValueSetDstu3.java | 17 ++++--- .../jpa/dao/r4/FhirResourceDaoValueSetR4.java | 17 ++++--- .../jpa/dao/r5/FhirResourceDaoValueSetR5.java | 17 ++++--- .../jpa/term/BaseHapiTerminologySvcImpl.java | 48 +++++++++++-------- ...ansionComponentWithConceptAccumulator.java | 15 ++++++ 6 files changed, 73 insertions(+), 43 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java index a9edc0792be..077a3a65fca 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java @@ -149,7 +149,7 @@ public class DaoConfig { /** * EXPERIMENTAL - Do not use in production! Do not change default of {@code false}! */ - private boolean myPreExpandValueSetsExperimental = false; + private boolean myPreExpandValueSetsExperimental = true; // FIXME: DM 2019-08-19 - Return to false; private boolean myFilterParameterEnabled = false; /** * EXPERIMENTAL - Do not use in production! Do not change default of {@code 0}! 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java index ad30a841f1e..7c8895b06dc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java @@ -194,6 +194,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } ValueSet source = new ValueSet(); + source.setUrl(theUri); source.getCompose().addInclude().addValueSet(theUri); @@ -210,16 +211,16 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } @Override - public ValueSet expand(ValueSet source, String theFilter) { + public ValueSet expand(ValueSet theSource, String theFilter) { ValueSet toExpand = new ValueSet(); - // for (UriType next : source.getCompose().getInclude()) { + // for (UriType next : theSource.getCompose().getInclude()) { // ConceptSetComponent include = toExpand.getCompose().addInclude(); // include.setSystem(next.getValue()); // addFilterIfPresent(theFilter, include); // } - for (ConceptSetComponent next : source.getCompose().getInclude()) { + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { toExpand.getCompose().addInclude(next); addFilterIfPresent(theFilter, next); } @@ -228,7 +229,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); } - toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); ValueSet retVal = doExpand(toExpand); @@ -240,10 +241,12 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } @Override - public ValueSet expand(ValueSet source, 
String theFilter, int theOffset, int theCount) { + public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int theCount) { ValueSet toExpand = new ValueSet(); + toExpand.setId(theSource.getId()); + toExpand.setUrl(theSource.getUrl()); - for (ConceptSetComponent next : source.getCompose().getInclude()) { + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { toExpand.getCompose().addInclude(next); addFilterIfPresent(theFilter, next); } @@ -252,7 +255,7 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); } - toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); ValueSet retVal = doExpand(toExpand, theOffset, theCount); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java index c0b10552dfc..6a525e07c0b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java @@ -190,6 +190,7 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple } ValueSet source = new ValueSet(); + source.setUrl(theUri); source.getCompose().addInclude().addValueSet(theUri); @@ -206,16 +207,16 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple } @Override - public ValueSet expand(ValueSet source, String theFilter) { + public ValueSet expand(ValueSet theSource, String theFilter) { ValueSet toExpand = new ValueSet(); - // for (UriType next : source.getCompose().getInclude()) { + // for (UriType next : theSource.getCompose().getInclude()) { // ConceptSetComponent include = 
toExpand.getCompose().addInclude(); // include.setSystem(next.getValue()); // addFilterIfPresent(theFilter, include); // } - for (ConceptSetComponent next : source.getCompose().getInclude()) { + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { toExpand.getCompose().addInclude(next); addFilterIfPresent(theFilter, next); } @@ -224,7 +225,7 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); } - toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); ValueSet retVal = doExpand(toExpand); @@ -236,10 +237,12 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple } @Override - public ValueSet expand(ValueSet source, String theFilter, int theOffset, int theCount) { + public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int theCount) { ValueSet toExpand = new ValueSet(); + toExpand.setId(theSource.getId()); + toExpand.setUrl(theSource.getUrl()); - for (ConceptSetComponent next : source.getCompose().getInclude()) { + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { toExpand.getCompose().addInclude(next); addFilterIfPresent(theFilter, next); } @@ -248,7 +251,7 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); } - toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); ValueSet retVal = doExpand(toExpand, theOffset, theCount); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java index b1f32510b1e..855c8d43bcf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java @@ -196,6 +196,7 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple } ValueSet source = new ValueSet(); + source.setUrl(theUri); source.getCompose().addInclude().addValueSet(theUri); @@ -212,16 +213,16 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple } @Override - public ValueSet expand(ValueSet source, String theFilter) { + public ValueSet expand(ValueSet theSource, String theFilter) { ValueSet toExpand = new ValueSet(); - // for (UriType next : source.getCompose().getInclude()) { + // for (UriType next : theSource.getCompose().getInclude()) { // ConceptSetComponent include = toExpand.getCompose().addInclude(); // include.setSystem(next.getValue()); // addFilterIfPresent(theFilter, include); // } - for (ConceptSetComponent next : source.getCompose().getInclude()) { + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { toExpand.getCompose().addInclude(next); addFilterIfPresent(theFilter, next); } @@ -230,7 +231,7 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); } - toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); ValueSet retVal = doExpand(toExpand); @@ -242,10 +243,12 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple } @Override - public ValueSet expand(ValueSet source, String theFilter, int theOffset, int theCount) { + public ValueSet expand(ValueSet theSource, String theFilter, int theOffset, int 
theCount) { ValueSet toExpand = new ValueSet(); + toExpand.setId(theSource.getId()); + toExpand.setUrl(theSource.getUrl()); - for (ConceptSetComponent next : source.getCompose().getInclude()) { + for (ConceptSetComponent next : theSource.getCompose().getInclude()) { toExpand.getCompose().addInclude(next); addFilterIfPresent(theFilter, next); } @@ -254,7 +257,7 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple throw new InvalidRequestException("ValueSet does not have any compose.include or compose.import values, can not expand"); } - toExpand.getCompose().getExclude().addAll(source.getCompose().getExclude()); + toExpand.getCompose().getExclude().addAll(theSource.getCompose().getExclude()); ValueSet retVal = doExpand(toExpand, theOffset, theCount); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java index 67f16c5cba7..7865069ecba 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java @@ -482,10 +482,17 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, @Transactional(propagation = Propagation.REQUIRED) public ValueSet expandValueSet(ValueSet theValueSetToExpand, int theOffset, int theCount) { ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSetToExpand, "ValueSet to expand can not be null"); - ValidateUtil.isTrueOrThrowInvalidRequest(theValueSetToExpand.hasUrl(), "ValueSet to be expanded must provide ValueSet.url", theValueSetToExpand); - ValidateUtil.isNotBlankOrThrowUnprocessableEntity(theValueSetToExpand.getUrl(), theValueSetToExpand.getIdElement().toUnqualifiedVersionless().getValue() + " to be expanded must provide ValueSet.url"); - Optional optionalTermValueSet = 
myValueSetDao.findByUrl(theValueSetToExpand.getUrl()); + Optional optionalTermValueSet; + // FIXME: DM 2019-08-19 - This is no good. I need pageable results while querying TRM_VALUESET_CONCEPT + if (theValueSetToExpand.hasId()) { + optionalTermValueSet = myValueSetDao.findByResourcePid(theValueSetToExpand.getIdElement().getIdPartAsLong()); + } else if (theValueSetToExpand.hasUrl()) { + optionalTermValueSet = myValueSetDao.findByUrl(theValueSetToExpand.getUrl()); + } else { + throw new UnprocessableEntityException("ValueSet to be expanded must provide either ValueSet.id or ValueSet.url"); + } + if (!optionalTermValueSet.isPresent()) { throw new InvalidRequestException("ValueSet is not present in terminology tables: " + theValueSetToExpand.getUrl()); } @@ -705,8 +712,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, */ FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class); - int maxResult = 50000; - jpaQuery.setMaxResults(maxResult); StopWatch sw = new StopWatch(); AtomicInteger count = new AtomicInteger(0); @@ -717,11 +722,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, theCodeCounter); } - - if (maxResult == count.get()) { - throw new InternalErrorException("Expansion fragment produced too many (>= " + maxResult + ") results"); - } - ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), count, sw.getMillis()); } else { @@ -1554,25 +1554,31 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, ourLog.info("Done storing TermConceptMap."); } - @Scheduled(fixedDelay = 600000) // 10 minutes. +// @Scheduled(fixedDelay = 600000) // 10 minutes. + @Scheduled(fixedDelay = 60000) // FIXME: DM 2019-08-19 - Remove this! 
@Override public synchronized void preExpandValueSetToTerminologyTables() { new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus theStatus) { - Optional optionalTermValueSet = getNextTermValueSetNotExpanded(); - if (optionalTermValueSet.isPresent()) { - TermValueSet termValueSet = optionalTermValueSet.get(); - termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANSION_IN_PROGRESS); - myValueSetDao.saveAndFlush(termValueSet); + boolean hasNextTermValueSetNotExpanded = true; + do { + Optional optionalTermValueSet = getNextTermValueSetNotExpanded(); + if (optionalTermValueSet.isPresent()) { + TermValueSet termValueSet = optionalTermValueSet.get(); + termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANSION_IN_PROGRESS); + myValueSetDao.saveAndFlush(termValueSet); - ValueSet valueSet = getValueSetFromResourceTable(termValueSet.getResource()); + ValueSet valueSet = getValueSetFromResourceTable(termValueSet.getResource()); - expandValueSet(valueSet, new ValueSetConceptAccumulator(termValueSet, myValueSetConceptDao, myValueSetConceptDesignationDao)); + expandValueSet(valueSet, new ValueSetConceptAccumulator(termValueSet, myValueSetConceptDao, myValueSetConceptDesignationDao)); - termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANDED); - myValueSetDao.saveAndFlush(termValueSet); - } + termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANDED); + myValueSetDao.saveAndFlush(termValueSet); + } else { + hasNextTermValueSetNotExpanded = false; + } + } while (hasNextTermValueSetNotExpanded); } }); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java index 5aa95013365..3104b452c6e 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java @@ -22,15 +22,23 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.jpa.entity.TermConceptDesignation; import ca.uhn.fhir.model.api.annotation.Block; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.hl7.fhir.r4.model.ValueSet; import java.util.Collection; @Block() public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.ValueSetExpansionComponent implements IValueSetConceptAccumulator { + private final int myMaxResults = 50000; + private int myConceptsCount; + + public ValueSetExpansionComponentWithConceptAccumulator() { + myConceptsCount = 0; + } @Override public void includeConcept(String theSystem, String theCode, String theDisplay) { + incrementConceptsCount(); ValueSet.ValueSetExpansionContainsComponent contains = this.addContains(); contains.setSystem(theSystem); contains.setCode(theCode); @@ -39,6 +47,7 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V @Override public void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, Collection theDesignations) { + incrementConceptsCount(); ValueSet.ValueSetExpansionContainsComponent contains = this.addContains(); contains.setSystem(theSystem); contains.setCode(theCode); @@ -65,4 +74,10 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V theSystem.equals(t.getSystem()) && theCode.equals(t.getCode())); } + + private void incrementConceptsCount() { + if (++myConceptsCount > myMaxResults) { + throw new InternalErrorException("Expansion produced too many (>= " + myMaxResults + ") results"); + } + } } From a43f4ba967433c58f671b992eff26ddace4972fb Mon Sep 17 00:00:00 2001 From: Ken Stevens Date: Mon, 19 Aug 2019 22:28:33 -0400 Subject: [PATCH 
05/23] should be test scope only --- hapi-fhir-test-utilities/pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index b22af2f86e8..3dfde10fae2 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -50,6 +50,7 @@ org.awaitility awaitility + test From 97e14711a2a0f6d3c9824800c938603000cbcbe3 Mon Sep 17 00:00:00 2001 From: Ken Stevens Date: Mon, 19 Aug 2019 22:31:52 -0400 Subject: [PATCH 06/23] add missing dep --- hapi-fhir-structures-dstu2/pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 0604b84fd02..b16cd0536cd 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -216,6 +216,11 @@ ${project.version} test + + org.awaitility + awaitility + test + From daf45db2bef27ac35c7ccb4fceb50267ab3bd42c Mon Sep 17 00:00:00 2001 From: Ken Stevens Date: Tue, 20 Aug 2019 09:14:21 -0400 Subject: [PATCH 07/23] fixed a test and removed awaitility excludes mvn install completed successfully --- hapi-fhir-jpaserver-base/pom.xml | 6 +++++- .../ca/uhn/fhir/parser/XmlParserDstu2Test.java | 4 ---- pom.xml | 16 ---------------- 3 files changed, 5 insertions(+), 21 deletions(-) diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index b6e36b529a3..951c0cbd193 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -146,7 +146,11 @@ logback-classic test - + + org.awaitility + awaitility + test + org.javassist javassist diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/parser/XmlParserDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/parser/XmlParserDstu2Test.java index f42ad461da9..7f6b0370b7a 100644 --- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/parser/XmlParserDstu2Test.java +++ 
b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/parser/XmlParserDstu2Test.java @@ -802,7 +802,6 @@ public class XmlParserDstu2Test { //@formatter:off assertThat(enc, stringContainsInOrder("", - "", "", "", "", @@ -817,7 +816,6 @@ public class XmlParserDstu2Test { "", "", "", - "", "", "", "", @@ -856,7 +854,6 @@ public class XmlParserDstu2Test { //@formatter:off assertThat(enc, stringContainsInOrder("", - "", "", "", "", @@ -869,7 +866,6 @@ public class XmlParserDstu2Test { "", "", "", - "", "", "", "", diff --git a/pom.xml b/pom.xml index e867b4eec09..bf2f0eac0da 100755 --- a/pom.xml +++ b/pom.xml @@ -1063,22 +1063,6 @@ org.awaitility awaitility 4.0.0-rc1 - org.codehaus.plexus From df7469731ba2f25db78caf3a204bb2e13f708357 Mon Sep 17 00:00:00 2001 From: Ken Stevens Date: Tue, 20 Aug 2019 15:02:02 -0400 Subject: [PATCH 08/23] turn of sql queries --- .../src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java | 5 +++-- hapi-fhir-test-utilities/pom.xml | 6 ------ 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java index a41355ef83f..7caa7447e24 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java @@ -23,6 +23,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement; import javax.sql.DataSource; import java.sql.Connection; import java.util.Properties; +import java.util.concurrent.TimeUnit; import static org.junit.Assert.fail; @@ -110,8 +111,8 @@ public class TestR4Config extends BaseJavaConfigR4 { SLF4JLogLevel level = SLF4JLogLevel.INFO; DataSource dataSource = ProxyDataSourceBuilder .create(retVal) - .logQueryBySlf4j(level, "SQL") -// .logSlowQueryBySlf4j(10, TimeUnit.SECONDS) +// .logQueryBySlf4j(level, "SQL") + .logSlowQueryBySlf4j(10, 
TimeUnit.SECONDS) // .countQuery(new ThreadQueryCountHolder()) .beforeQuery(new BlockLargeNumbersOfParamsListener()) .afterQuery(captureQueriesListener()) diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index 3dfde10fae2..1a57db75e00 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -46,12 +46,6 @@ junit junit - - - org.awaitility - awaitility - test - From 3218bd7853cd98a489f65b25ec5bc15f3fabf25b Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Tue, 20 Aug 2019 18:11:53 -0400 Subject: [PATCH 09/23] Incremental work on large ValueSet expansion support; implemented paging when reading concepts from the terminology tables. Switched from Derby to H2. --- hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml | 22 +-- .../ca/uhn/fhir/jpa/demo/CommonConfig.java | 15 +- .../jpa/dao/data/ITermValueSetConceptDao.java | 10 +- .../ITermValueSetConceptDesignationDao.java | 7 + .../fhir/jpa/dao/data/ITermValueSetDao.java | 4 +- .../jpa/term/BaseHapiTerminologySvcImpl.java | 134 +++++++++++++----- .../jpa/term/ValueSetConceptAccumulator.java | 2 +- .../jpa/term/TerminologySvcImplR4Test.java | 30 ++++ 8 files changed, 154 insertions(+), 70 deletions(-) diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml index 31f8770e279..5715d52ddca 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml @@ -90,28 +90,12 @@ commons-cli - - org.apache.derby - derby + com.h2database + h2 - - org.apache.derby - derbynet - - - org.apache.derby - derbyclient - - diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java index 47d81a02566..a2ceab17980 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java +++ 
b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java @@ -1,17 +1,16 @@ package ca.uhn.fhir.jpa.demo; -import java.util.Properties; - import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory; -import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect; import org.apache.commons.dbcp2.BasicDataSource; import org.apache.commons.lang3.time.DateUtils; +import org.hibernate.dialect.H2Dialect; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import javax.sql.DataSource; +import java.util.Properties; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -38,20 +37,20 @@ public class CommonConfig { } /** - * The following bean configures the database connection. The 'url' property value of "jdbc:derby:directory:jpaserver_derby_files;create=true" indicates that the server should save resources in a - * directory called "jpaserver_derby_files". + * The following bean configures the database connection. The 'url' property value of "jdbc:h2:file:target./jpaserver_h2_files" indicates that the server should save resources in a + * directory called "jpaserver_h2_files". * * A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource. 
*/ @Bean(destroyMethod = "close") public DataSource dataSource() { - String url = "jdbc:derby:directory:target/jpaserver_derby_files;create=true"; + String url = "jdbc:h2:file:./target/jpaserver_h2_files"; if (isNotBlank(ContextHolder.getDatabaseUrl())) { url = ContextHolder.getDatabaseUrl(); } BasicDataSource retVal = new BasicDataSource(); - retVal.setDriver(new org.apache.derby.jdbc.EmbeddedDriver()); + retVal.setDriver(new org.h2.Driver()); retVal.setUrl(url); retVal.setUsername(""); retVal.setPassword(""); @@ -61,7 +60,7 @@ public class CommonConfig { @Bean public Properties jpaProperties() { Properties extraProperties = new Properties(); - extraProperties.put("hibernate.dialect", DerbyTenSevenHapiFhirDialect.class.getName()); + extraProperties.put("hibernate.dialect", H2Dialect.class.getName()); extraProperties.put("hibernate.format_sql", "true"); extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.hbm2ddl.auto", "update"); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java index 9577c47b842..bfec0626c1e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java @@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.dao.data; */ import ca.uhn.fhir.jpa.entity.TermValueSetConcept; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; @@ -30,11 +32,17 @@ import java.util.Optional; public interface ITermValueSetConceptDao extends JpaRepository { + @Query("SELECT COUNT(vsc) FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid") + 
Integer countByTermValueSetId(@Param("pid") Long theValueSetId); + @Query("DELETE FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid") @Modifying void deleteByTermValueSetId(@Param("pid") Long theValueSetId); + @Query("SELECT vsc from TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid") + Slice findByTermValueSetId(Pageable thePage, @Param("pid") Long theValueSetId); + @Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid AND vsc.mySystem = :system_url AND vsc.myCode = :codeval") - Optional findByValueSetIdSystemAndCode(@Param("pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode); + Optional findByTermValueSetIdSystemAndCode(@Param("pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java index 1a0875b5a2f..ced9e67b5f3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java @@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.dao.data; */ import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; @@ -28,8 +30,13 @@ import org.springframework.data.repository.query.Param; public interface ITermValueSetConceptDesignationDao extends JpaRepository { + @Query("SELECT COUNT(vscd) FROM TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myId = :pid") + Integer 
countByTermValueSetConceptId(@Param("pid") Long theValueSetConceptId); + @Query("DELETE FROM TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myValueSet.myId = :pid") @Modifying void deleteByTermValueSetId(@Param("pid") Long theValueSetId); + @Query("SELECT vscd from TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myId = :pid") + Slice findByTermValueSetConceptId(Pageable thePage, @Param("pid") Long theValueSetConceptId); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java index d04cf883041..037d65f98d0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java @@ -22,8 +22,8 @@ package ca.uhn.fhir.jpa.dao.data; import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetExpansionStatusEnum; -import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; @@ -44,6 +44,6 @@ public interface ITermValueSetDao extends JpaRepository { Optional findByUrl(@Param("url") String theUrl); @Query("SELECT vs FROM TermValueSet vs WHERE vs.myExpansionStatus = :expansion_status") - Page findByExpansionStatus(Pageable pageable, @Param("expansion_status") TermValueSetExpansionStatusEnum theExpansionStatus); + Slice findByExpansionStatus(Pageable pageable, @Param("expansion_status") TermValueSetExpansionStatusEnum theExpansionStatus); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java 
index 7865069ecba..a3e3723ebfe 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java @@ -420,7 +420,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, count = 0; while (true) { Slice link = theLoader.get(); - if (link.hasContent() == false) { + if (!link.hasContent()) { break; } @@ -484,7 +484,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSetToExpand, "ValueSet to expand can not be null"); Optional optionalTermValueSet; - // FIXME: DM 2019-08-19 - This is no good. I need pageable results while querying TRM_VALUESET_CONCEPT if (theValueSetToExpand.hasId()) { optionalTermValueSet = myValueSetDao.findByResourcePid(theValueSetToExpand.getIdElement().getIdPartAsLong()); } else if (theValueSetToExpand.hasUrl()) { @@ -507,35 +506,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, expansionComponent.setIdentifier(UUID.randomUUID().toString()); expansionComponent.setTimestamp(new Date()); - int numberOfConcepts = termValueSet.getConcepts().size(); - expansionComponent.setTotal(numberOfConcepts); - expansionComponent.setOffset(theOffset); - - List subListOfConcepts = new ArrayList<>(); - if (theCount != 0 && numberOfConcepts != 0) { - int toIndex = Math.min(theOffset + theCount, numberOfConcepts); - subListOfConcepts = termValueSet.getConcepts().subList(theOffset, toIndex); - } - - for (TermValueSetConcept concept : subListOfConcepts) { - ValueSet.ValueSetExpansionContainsComponent containsComponent = expansionComponent.addContains(); - containsComponent.setSystem(concept.getSystem()); - containsComponent.setCode(concept.getCode()); - containsComponent.setDisplay(concept.getDisplay()); - - // TODO: DM 2019-08-17 - Implement includeDesignations parameter for $expand 
operation. - for (TermValueSetConceptDesignation designation : concept.getDesignations()) { - ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.addDesignation(); - designationComponent.setLanguage(designation.getLanguage()); - if (isNoneBlank(designation.getUseSystem(), designation.getUseCode())) { - designationComponent.setUse(new Coding( - designation.getUseSystem(), - designation.getUseCode(), - designation.getUseDisplay())); - } - designationComponent.setValue(designation.getValue()); - } - } + populateExpansionComponent(expansionComponent, termValueSet, theOffset, theCount); ValueSet valueSet = new ValueSet(); valueSet.setStatus(Enumerations.PublicationStatus.ACTIVE); @@ -544,6 +515,91 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, return valueSet; } + private void populateExpansionComponent(ValueSet.ValueSetExpansionComponent theExpansionComponent, TermValueSet theTermValueSet, int theOffset, int theCount) { + int total = myValueSetConceptDao.countByTermValueSetId(theTermValueSet.getId()); + theExpansionComponent.setTotal(total); + theExpansionComponent.setOffset(theOffset); + theExpansionComponent.addParameter().setName("offset").setValue(new IntegerType(theOffset)); + theExpansionComponent.addParameter().setName("count").setValue(new IntegerType(theCount)); + + if (theCount == 0 || total == 0) { + return; + } + + expandConcepts(theExpansionComponent, theTermValueSet, theOffset, theCount); + } + + private void expandConcepts(ValueSet.ValueSetExpansionComponent theExpansionComponent, TermValueSet theTermValueSet, int theOffset, int theCount) { + int conceptsExpanded = 0; + for (int i = theOffset; i < (theOffset + theCount); i++) { + final int page = i; + Supplier> loader = () -> myValueSetConceptDao.findByTermValueSetId(PageRequest.of(page, 1), theTermValueSet.getId()); + + Slice slice = loader.get(); + if (!slice.hasContent()) { + break; + } + + for (TermValueSetConcept concept : 
slice.getContent()) { + ValueSet.ValueSetExpansionContainsComponent containsComponent = theExpansionComponent.addContains(); + containsComponent.setSystem(concept.getSystem()); + containsComponent.setCode(concept.getCode()); + containsComponent.setDisplay(concept.getDisplay()); + + // TODO: DM 2019-08-17 - Implement includeDesignations parameter for $expand operation to make this optional. + expandDesignations(theTermValueSet, concept, containsComponent); + + if (++conceptsExpanded % 250 == 0) { + ourLog.info("Have expanded {} concepts in ValueSet[{}]", conceptsExpanded, theTermValueSet.getUrl()); + } + } + + if (!slice.hasNext()) { + break; + } + } + + if (conceptsExpanded > 0) { + ourLog.info("Have expanded {} concepts in ValueSet[{}]", conceptsExpanded, theTermValueSet.getUrl()); + } + } + + private void expandDesignations(TermValueSet theValueSet, TermValueSetConcept theConcept, ValueSet.ValueSetExpansionContainsComponent theContainsComponent) { + int designationsExpanded = 0; + int index = 0; + while (true) { + final int page = index++; + Supplier> loader = () -> myValueSetConceptDesignationDao.findByTermValueSetConceptId(PageRequest.of(page, 1000), theConcept.getId()); + + Slice slice = loader.get(); + if (!slice.hasContent()) { + break; + } + + for (TermValueSetConceptDesignation designation : slice.getContent()) { + ValueSet.ConceptReferenceDesignationComponent designationComponent = theContainsComponent.addDesignation(); + designationComponent.setLanguage(designation.getLanguage()); + designationComponent.setUse(new Coding( + designation.getUseSystem(), + designation.getUseCode(), + designation.getUseDisplay())); + designationComponent.setValue(designation.getValue()); + + if (++designationsExpanded % 250 == 0) { + ourLog.info("Have expanded {} designations for Concept[{}|{}] in ValueSet[{}]", designationsExpanded, theConcept.getSystem(), theConcept.getCode(), theValueSet.getUrl()); + } + } + + if (!slice.hasNext()) { + break; + } + } + + if 
(designationsExpanded > 0) { + ourLog.info("Have expanded {} designations for Concept[{}|{}] in ValueSet[{}]", designationsExpanded, theConcept.getSystem(), theConcept.getCode(), theValueSet.getUrl()); + } + } + @Override @Transactional(propagation = Propagation.REQUIRED) public void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { @@ -658,10 +714,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, String value = nextFilter.getValue(); if (value.endsWith("$")) { value = value.substring(0, value.length() - 1); - } else if (value.endsWith(".*") == false) { + } else if (!value.endsWith(".*")) { value = value + ".*"; } - if (value.startsWith("^") == false && value.startsWith(".*") == false) { + if (!value.startsWith("^") && !value.startsWith(".*")) { value = ".*" + value; } else if (value.startsWith("^")) { value = value.substring(1); @@ -732,7 +788,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, throw new InvalidRequestException("Unknown code system: " + system); } - if (theInclude.getConcept().isEmpty() == false) { + if (!theInclude.getConcept().isEmpty()) { for (ValueSet.ConceptReferenceComponent next : theInclude.getConcept()) { String nextCode = next.getCode(); if (isNoneBlank(system, nextCode) && !theAddedCodes.contains(system + "|" + nextCode)) { @@ -857,7 +913,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, StopWatch stopwatch = new StopWatch(); Optional concept = fetchLoadedCode(theCodeSystemResourcePid, theCode); - if (concept.isPresent() == false) { + if (!concept.isPresent()) { return Collections.emptySet(); } @@ -888,7 +944,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, Stopwatch stopwatch = Stopwatch.createStarted(); Optional concept = fetchLoadedCode(theCodeSystemResourcePid, theCode); - if (concept.isPresent() == false) { + if (!concept.isPresent()) { 
return Collections.emptySet(); } @@ -1073,8 +1129,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, @Override protected void doInTransactionWithoutResult(TransactionStatus theArg0) { int maxResult = 1000; - Page concepts = myConceptDao.findResourcesRequiringReindexing(new PageRequest(0, maxResult)); - if (concepts.hasContent() == false) { + Page concepts = myConceptDao.findResourcesRequiringReindexing(PageRequest.of(0, maxResult)); + if (!concepts.hasContent()) { if (myChildToParentPidCache != null) { ourLog.info("Clearing parent concept cache"); myNextReindexPass = System.currentTimeMillis() + DateUtils.MILLIS_PER_MINUTE; @@ -1587,7 +1643,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, private Optional getNextTermValueSetNotExpanded() { Optional retVal = Optional.empty(); - Page page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetExpansionStatusEnum.NOT_EXPANDED); + Slice page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetExpansionStatusEnum.NOT_EXPANDED); if (!page.getContent().isEmpty()) { retVal = Optional.of(page.getContent().get(0)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java index 4e2e5b92ec2..a072fb52251 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java @@ -71,7 +71,7 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { } // Get existing entity so it can be deleted. 
- Optional optionalConcept = myValueSetConceptDao.findByValueSetIdSystemAndCode(myTermValueSet.getId(), theSystem, theCode); + Optional optionalConcept = myValueSetConceptDao.findByTermValueSetIdSystemAndCode(myTermValueSet.getId(), theSystem, theCode); if (optionalConcept.isPresent()) { TermValueSetConcept concept = optionalConcept.get(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index e7b16145091..cd6a1b46811 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -614,6 +614,12 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset()); + assertEquals(2, expandedValueSet.getExpansion().getParameter().size()); + assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName()); + assertEquals(0, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue()); + assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName()); + assertEquals(1000, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); + assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getContains().size()); ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); @@ -683,6 +689,12 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); 
assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset()); + assertEquals(2, expandedValueSet.getExpansion().getParameter().size()); + assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName()); + assertEquals(0, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue()); + assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName()); + assertEquals(23, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); + assertEquals(23, expandedValueSet.getExpansion().getContains().size()); ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); @@ -746,6 +758,12 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), expandedValueSet.getExpansion().getOffset()); + assertEquals(2, expandedValueSet.getExpansion().getParameter().size()); + assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName()); + assertEquals(0, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue()); + assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName()); + assertEquals(0, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); + assertFalse(expandedValueSet.getExpansion().hasContains()); } @@ -768,6 +786,12 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); assertEquals(1, expandedValueSet.getExpansion().getOffset()); + assertEquals(2, expandedValueSet.getExpansion().getParameter().size()); + assertEquals("offset", 
expandedValueSet.getExpansion().getParameter().get(0).getName()); + assertEquals(1, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue()); + assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName()); + assertEquals(1000, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); + assertEquals(codeSystem.getConcept().size() - expandedValueSet.getExpansion().getOffset(), expandedValueSet.getExpansion().getContains().size()); ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); @@ -823,6 +847,12 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); assertEquals(1, expandedValueSet.getExpansion().getOffset()); + assertEquals(2, expandedValueSet.getExpansion().getParameter().size()); + assertEquals("offset", expandedValueSet.getExpansion().getParameter().get(0).getName()); + assertEquals(1, expandedValueSet.getExpansion().getParameter().get(0).getValueIntegerType().getValue().intValue()); + assertEquals("count", expandedValueSet.getExpansion().getParameter().get(1).getName()); + assertEquals(22, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); + assertEquals(22, expandedValueSet.getExpansion().getContains().size()); ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); From 72dd6b2922b205c00d57767df0e46a21dd912de1 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Thu, 22 Aug 2019 14:00:08 -0400 Subject: [PATCH 10/23] Incremental work on large ValueSet expansion support; fixed broken deletion for TermValueSetConceptDesignation. 
--- .../jpa/dao/data/ITermValueSetConceptDao.java | 2 +- .../ITermValueSetConceptDesignationDao.java | 8 +-- .../TermValueSetConceptDesignation.java | 38 +++++++++++++ .../jpa/term/BaseHapiTerminologySvcImpl.java | 43 ++++++++++++++ .../jpa/term/ValueSetConceptAccumulator.java | 1 + .../jpa/term/TerminologySvcImplR4Test.java | 56 +++++++++++++++---- 6 files changed, 132 insertions(+), 16 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java index bfec0626c1e..e225719ed4e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java @@ -32,7 +32,7 @@ import java.util.Optional; public interface ITermValueSetConceptDao extends JpaRepository { - @Query("SELECT COUNT(vsc) FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid") + @Query("SELECT COUNT(*) FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid") Integer countByTermValueSetId(@Param("pid") Long theValueSetId); @Query("DELETE FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myId = :pid") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java index ced9e67b5f3..1792b8496e0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java @@ -30,13 +30,13 @@ import org.springframework.data.repository.query.Param; public interface ITermValueSetConceptDesignationDao extends JpaRepository { - @Query("SELECT COUNT(vscd) FROM TermValueSetConceptDesignation vscd WHERE 
vscd.myConcept.myId = :pid") - Integer countByTermValueSetConceptId(@Param("pid") Long theValueSetConceptId); + @Query("SELECT COUNT(vscd) FROM TermValueSetConceptDesignation vscd WHERE vscd.myValueSet.myId = :pid") + Integer countByTermValueSetId(@Param("pid") Long theValueSetId); - @Query("DELETE FROM TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myValueSet.myId = :pid") + @Query("DELETE FROM TermValueSetConceptDesignation vscd WHERE vscd.myValueSet.myId = :pid") @Modifying void deleteByTermValueSetId(@Param("pid") Long theValueSetId); - @Query("SELECT vscd from TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myId = :pid") + @Query("SELECT vscd FROM TermValueSetConceptDesignation vscd WHERE vscd.myConcept.myId = :pid") Slice findByTermValueSetConceptId(Pageable thePage, @Param("pid") Long theValueSetConceptId); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java index b1ab2cff713..e3aa0c5196a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java @@ -52,6 +52,16 @@ public class TermValueSetConceptDesignation implements Serializable { @JoinColumn(name = "VALUESET_CONCEPT_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TRM_VALUESET_CONCEPT_PID")) private TermValueSetConcept myConcept; + @ManyToOne() + @JoinColumn(name = "VALUESET_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TRM_VSCD_VS_PID")) + private TermValueSet myValueSet; + + @Transient + private String myValueSetUrl; + + @Transient + private String myValueSetName; + @Column(name = "LANG", nullable = true, length = MAX_LENGTH) private String myLanguage; @@ -80,6 +90,31 @@ public class 
TermValueSetConceptDesignation implements Serializable { return this; } + public TermValueSet getValueSet() { + return myValueSet; + } + + public TermValueSetConceptDesignation setValueSet(TermValueSet theValueSet) { + myValueSet = theValueSet; + return this; + } + + public String getValueSetUrl() { + if (myValueSetUrl == null) { + myValueSetUrl = getValueSet().getUrl(); + } + + return myValueSetUrl; + } + + public String getValueSetName() { + if (myValueSetName == null) { + myValueSetName = getValueSet().getName(); + } + + return myValueSetName; + } + public String getLanguage() { return myLanguage; } @@ -167,6 +202,9 @@ public class TermValueSetConceptDesignation implements Serializable { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) .append("myId", myId) .append(myConcept != null ? ("myConcept - id=" + myConcept.getId()) : ("myConcept=(null)")) + .append(myValueSet != null ? ("myValueSet - id=" + myValueSet.getId()) : ("myValueSet=(null)")) + .append("myValueSetUrl", this.getValueSetUrl()) + .append("myValueSetName", this.getValueSetName()) .append("myLanguage", myLanguage) .append("myUseSystem", myUseSystem) .append("myUseCode", myUseCode) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java index a3e3723ebfe..bf1b6f0bf2d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java @@ -612,6 +612,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, // Handle includes ourLog.debug("Handling includes"); for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) { + ourLog.info("Working with " + identifyValueSetForLogging(theValueSetToExpand)); boolean add = true; 
expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter); } @@ -619,11 +620,48 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, // Handle excludes ourLog.debug("Handling excludes"); for (ValueSet.ConceptSetComponent exclude : theValueSetToExpand.getCompose().getExclude()) { + ourLog.info("Working with " + identifyValueSetForLogging(theValueSetToExpand)); boolean add = false; expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, exclude, add, theCodeCounter); } } + private String identifyValueSetForLogging(ValueSet theValueSet) { + StringBuilder sb = new StringBuilder(); + boolean isIdentified = false; + sb + .append("ValueSet:"); + if (theValueSet.hasId()) { + isIdentified = true; + sb + .append(" ValueSet.id[") + .append(theValueSet.getId()) + .append("]"); + } + if (theValueSet.hasUrl()) { + isIdentified = true; + sb + .append(" ValueSet.url[") + .append(theValueSet.getUrl()) + .append("]"); + } + if (theValueSet.hasIdentifier()) { + isIdentified = true; + sb + .append(" ValueSet.identifier[") + .append(theValueSet.getIdentifierFirstRep().getSystem()) + .append("|") + .append(theValueSet.getIdentifierFirstRep().getValue()) + .append("]"); + } + + if (!isIdentified) { + sb.append(" None of ValueSet.id, ValueSet.url, and ValueSet.identifier are provided."); + } + + return sb.toString(); + } + protected List expandValueSetAndReturnVersionIndependentConcepts(org.hl7.fhir.r4.model.ValueSet theValueSetToExpandR4) { org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionComponent expandedR4 = expandValueSet(theValueSetToExpandR4).getExpansion(); @@ -768,6 +806,11 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, */ FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class); + /* + * DM 2019-08-21 - Processing slows after any ValueSets with many codes explicitly identified. 
This might + * be due to the dark arts that is memory management. Will monitor but not do anything about this right now. + */ + BooleanQuery.setMaxClauseCount(10000); StopWatch sw = new StopWatch(); AtomicInteger count = new AtomicInteger(0); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java index a072fb52251..4d74d1ed24d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java @@ -116,6 +116,7 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { TermValueSetConceptDesignation designation = new TermValueSetConceptDesignation(); designation.setConcept(theConcept); + designation.setValueSet(myTermValueSet); designation.setLanguage(theDesignation.getLanguage()); if (isNoneBlank(theDesignation.getUseSystem(), theDesignation.getUseCode())) { designation.setUseSystem(theDesignation.getUseSystem()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index cd6a1b46811..c80329d070b 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -562,6 +562,40 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { } + @Test + public void testDeleteValueSet() throws Exception { + myDaoConfig.setPreExpandValueSetsExperimental(true); + + loadAndPersistCodeSystemAndValueSetWithDesignations(); + + CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId); + ourLog.info("CodeSystem:\n" + 
myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem)); + + ValueSet valueSet = myValueSetDao.read(myExtensionalVsId); + ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); + + myTermSvc.preExpandValueSetToTerminologyTables(); + + ValueSet expandedValueSet = myTermSvc.expandValueSet(valueSet, myDaoConfig.getPreExpandValueSetsDefaultOffsetExperimental(), myDaoConfig.getPreExpandValueSetsDefaultCountExperimental()); + ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); + + Long termValueSetId = myTermValueSetDao.findByResourcePid(valueSet.getIdElement().toUnqualifiedVersionless().getIdPartAsLong()).get().getId(); + assertEquals(3, myTermValueSetConceptDesignationDao.countByTermValueSetId(termValueSetId).intValue()); + assertEquals(24, myTermValueSetConceptDao.countByTermValueSetId(termValueSetId).intValue()); + + new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { + @Override + protected void doInTransactionWithoutResult(TransactionStatus theStatus) { + myTermValueSetConceptDesignationDao.deleteByTermValueSetId(termValueSetId); + assertEquals(0, myTermValueSetConceptDesignationDao.countByTermValueSetId(termValueSetId).intValue()); + myTermValueSetConceptDao.deleteByTermValueSetId(termValueSetId); + assertEquals(0, myTermValueSetConceptDao.countByTermValueSetId(termValueSetId).intValue()); + myTermValueSetDao.deleteByTermValueSetId(termValueSetId); + assertFalse(myTermValueSetDao.findByResourcePid(valueSet.getIdElement().toUnqualifiedVersionless().getIdPartAsLong()).isPresent()); + } + }); + } + @Test public void testDuplicateCodeSystemUrls() throws Exception { loadAndPersistCodeSystem(); @@ -895,17 +929,6 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { verify(myValueSetCodeAccumulator, times(9)).includeConceptWithDesignations(anyString(), anyString(), 
nullable(String.class), anyCollection()); } - @Test - public void testValidateCode() { - createCodeSystem(); - - IValidationSupport.CodeValidationResult validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ParentWithNoChildrenA", null); - assertEquals(true, validation.isOk()); - - validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ZZZZZZZ", null); - assertEquals(false, validation.isOk()); - } - @Test public void testStoreTermCodeSystemAndChildren() throws Exception { loadAndPersistCodeSystemWithDesignations(); @@ -2579,6 +2602,17 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { }); } + @Test + public void testValidateCode() { + createCodeSystem(); + + IValidationSupport.CodeValidationResult validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ParentWithNoChildrenA", null); + assertEquals(true, validation.isOk()); + + validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ZZZZZZZ", null); + assertEquals(false, validation.isOk()); + } + @AfterClass public static void afterClassClearContext() { TestUtil.clearAllStaticFieldsForUnitTest(); From 890555a77d3ab4f591d203174ac0b91610bef4fe Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Fri, 23 Aug 2019 11:46:32 -0400 Subject: [PATCH 11/23] Incremental work on large ValueSet expansion support; transaction boundaries need to be broken up. 
--- .../ca/uhn/fhir/i18n/hapi-messages.properties | 35 +++-- .../fhir/jpa/dao/data/ITermValueSetDao.java | 4 +- .../ca/uhn/fhir/jpa/entity/TermValueSet.java | 8 +- .../TermValueSetExpansionStatusEnum.java | 42 ------ .../TermValueSetPreExpansionStatusEnum.java | 69 +++++++++ .../jpa/term/BaseHapiTerminologySvcImpl.java | 135 +++++++++++++++--- .../jpa/term/ValueSetConceptAccumulator.java | 8 +- .../jpa/term/TerminologySvcImplR4Test.java | 16 ++- .../tasks/HapiFhirJpaMigrationTasks.java | 8 ++ 9 files changed, 238 insertions(+), 87 deletions(-) delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetExpansionStatusEnum.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties index ed76533ec3b..a8765577f54 100644 --- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties +++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties @@ -17,8 +17,8 @@ ca.uhn.fhir.rest.client.impl.GenericClient.noPagingLinkFoundInBundle=Can not per ca.uhn.fhir.rest.client.impl.GenericClient.noVersionIdForVread=No version specified in URL for 'vread' operation: {0} ca.uhn.fhir.rest.client.impl.GenericClient.incompleteUriForRead=The given URI is not an absolute URL and is not usable for this operation: {0} ca.uhn.fhir.rest.client.impl.GenericClient.cannotDetermineResourceTypeFromUri=Unable to determine the resource type from the given URI: {0} -ca.uhn.fhir.rest.client.impl.RestfulClientFactory.failedToRetrieveConformance=Failed to retrieve the server metadata statement during client initialization. 
URL used was {0} -ca.uhn.fhir.rest.client.impl.RestfulClientFactory.wrongVersionInConformance=The server at base URL "{0}" returned a conformance statement indicating that it supports FHIR version "{1}" which corresponds to {2}, but this client is configured to use {3} (via the FhirContext). +ca.uhn.fhir.rest.client.impl.RestfulClientFactory.failedToRetrieveConformance=Failed to retrieve the server metadata statement during client initialization. URL used was {0} +ca.uhn.fhir.rest.client.impl.RestfulClientFactory.wrongVersionInConformance=The server at base URL "{0}" returned a conformance statement indicating that it supports FHIR version "{1}" which corresponds to {2}, but this client is configured to use {3} (via the FhirContext). ca.uhn.fhir.rest.server.interceptor.auth.RuleImplOp.invalidRequestBundleTypeForTransaction=Invalid request Bundle.type value for transaction: {0} @@ -46,14 +46,14 @@ ca.uhn.fhir.rest.param.BaseParam.chainNotSupported=Invalid search parameter "{0} ca.uhn.fhir.rest.server.method.ResourceParameter.invalidContentTypeInRequest=Incorrect Content-Type header value of "{0}" was provided in the request. A FHIR Content-Type is required for "{1}" operation ca.uhn.fhir.rest.server.method.ResourceParameter.noContentTypeInRequest=No Content-Type header was provided in the request. This is required for "{0}" operation -ca.uhn.fhir.rest.server.method.ResourceParameter.failedToParseRequest=Failed to parse request body as {0} resource. Error was: {1} +ca.uhn.fhir.rest.server.method.ResourceParameter.failedToParseRequest=Failed to parse request body as {0} resource. 
Error was: {1} ca.uhn.fhir.parser.ParserState.wrongResourceTypeFound=Incorrect resource type found, expected "{0}" but found "{1}" ca.uhn.fhir.rest.server.RestfulServer.getPagesNonHttpGet=Requests for _getpages must use HTTP GET ca.uhn.fhir.rest.server.RestfulServer.unknownMethod=Invalid request: The FHIR endpoint on this server does not know how to handle {0} operation[{1}] with parameters [{2}] ca.uhn.fhir.rest.server.RestfulServer.rootRequest=This is the base URL of FHIR server. Unable to handle this request, as it does not contain a resource type or operation name. ca.uhn.fhir.rest.server.RestfulServer.rootRequest.multitenant=This is the base URL of a multitenant FHIR server. Unable to handle this request, as it does not contain a tenant ID. -ca.uhn.fhir.validation.ValidationContext.unableToDetermineEncoding=Unable to determine encoding (e.g. XML / JSON) on validation input. Is this a valid FHIR resource body? +ca.uhn.fhir.validation.ValidationContext.unableToDetermineEncoding=Unable to determine encoding (e.g. XML / JSON) on validation input. Is this a valid FHIR resource body? ca.uhn.fhir.validation.FhirValidator.noPhWarningOnStartup=Ph-schematron library not found on classpath, will not attempt to perform schematron validation ca.uhn.fhir.validation.FhirValidator.noPhError=Ph-schematron library not found on classpath, can not enable perform schematron validation ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected during validation @@ -62,22 +62,22 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request. 
-ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index. -ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. +ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index. +ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. 
This is only valid as a response operation, not in a request ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}" ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlMultipleMatches=Invalid match URL "{0}" - Multiple resources match this search ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationWithMultipleMatchFailure=Failed to {0} resource with match URL "{1}" because this search matched {2} resources -ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided -ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1} +ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided +ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1} ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.uniqueIndexConflictFailure=Can not create resource of type {0} as it would create a duplicate index matching query: {1} (existing index belongs to {2}) ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionContainsMultipleWithDuplicateId=Transaction bundle contains multiple resources with ID: {0} -ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}" -ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionMissingUrl=Unable to perform {0}, no URL provided. 
-ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1} +ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}" +ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionMissingUrl=Unable to perform {0}, no URL provided. +ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1} ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.cantValidateWithNoResource=No resource supplied for $validate operation (resource is required unless mode is \"delete\") ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.duplicateCreateForcedId=Can not create entity with ID[{0}], a resource with this ID already exists @@ -97,8 +97,8 @@ ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulDeletes=Successfully delet ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.invalidSearchParameter=Unknown search parameter "{0}". 
Value search parameters for this search are: {1} ca.uhn.fhir.jpa.dao.TransactionProcessor.missingMandatoryResource=Missing required resource in Bundle.entry[{1}].resource for operation {0} -ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchContentType=Missing or invalid content type for PATCH operation -ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchBody=Unable to determine PATCH body from request +ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchContentType=Missing or invalid content type for PATCH operation +ca.uhn.fhir.jpa.dao.TransactionProcessor.missingPatchBody=Unable to determine PATCH body from request ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.externalReferenceNotAllowed=Resource contains external reference to URL "{0}" but this server is not configured to allow external references ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.failedToExtractPaths=Failed to extract values from resource using FHIRPath "{0}": {1} @@ -118,8 +118,8 @@ ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.noParam=Note that cascadi ca.uhn.fhir.jpa.provider.BaseJpaProvider.cantCombintAtAndSince=Unable to combine _at and _since parameters for history operation ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.noAttachmentDataPresent=The resource with ID {0} has no data at path: {1} -ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownBlobId=Can not find the requested binary content. It may have been deleted. -ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownPath=Unable to find content in resource of type {0} at path: {1} +ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownBlobId=Can not find the requested binary content. It may have been deleted. 
+ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownPath=Unable to find content in resource of type {0} at path: {1} ca.uhn.fhir.jpa.binstore.BinaryAccessProvider.unknownType=Content in resource of type {0} at path {1} is not appropriate for binary storage: {2} @@ -127,5 +127,10 @@ ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemU ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1} ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateValueSetUrl=Can not create multiple ValueSet resources with ValueSet.url "{0}", already have one with resource ID: {1} ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted! +ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.valueSetNotReadyForExpand=ValueSet is not ready for operation $expand; current status: {0} | {1} ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON patch to {0}: {1} + +ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.notExpanded=The ValueSet is waiting to be picked up and pre-expanded by a scheduled task. +ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expansionInProgress=The ValueSet has been picked up by a scheduled task and pre-expansion is in progress. +ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expanded=The ValueSet has been picked up by a scheduled task and pre-expansion is complete. 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java index 037d65f98d0..86053789288 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java @@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.dao.data; */ import ca.uhn.fhir.jpa.entity.TermValueSet; -import ca.uhn.fhir.jpa.entity.TermValueSetExpansionStatusEnum; +import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; import org.springframework.data.jpa.repository.JpaRepository; @@ -44,6 +44,6 @@ public interface ITermValueSetDao extends JpaRepository { Optional findByUrl(@Param("url") String theUrl); @Query("SELECT vs FROM TermValueSet vs WHERE vs.myExpansionStatus = :expansion_status") - Slice findByExpansionStatus(Pageable pageable, @Param("expansion_status") TermValueSetExpansionStatusEnum theExpansionStatus); + Slice findByExpansionStatus(Pageable pageable, @Param("expansion_status") TermValueSetPreExpansionStatusEnum theExpansionStatus); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java index 28a60a1fef8..0ba562efe79 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java @@ -71,11 +71,11 @@ public class TermValueSet implements Serializable { @Enumerated(EnumType.STRING) @Column(name = "EXPANSION_STATUS", nullable = false, length = MAX_EXPANSION_STATUS_LENGTH) - private TermValueSetExpansionStatusEnum myExpansionStatus; + private TermValueSetPreExpansionStatusEnum myExpansionStatus; public TermValueSet() { 
super(); - myExpansionStatus = TermValueSetExpansionStatusEnum.NOT_EXPANDED; + myExpansionStatus = TermValueSetPreExpansionStatusEnum.NOT_EXPANDED; } public Long getId() { @@ -120,11 +120,11 @@ public class TermValueSet implements Serializable { return myConcepts; } - public TermValueSetExpansionStatusEnum getExpansionStatus() { + public TermValueSetPreExpansionStatusEnum getExpansionStatus() { return myExpansionStatus; } - public void setExpansionStatus(TermValueSetExpansionStatusEnum theExpansionStatus) { + public void setExpansionStatus(TermValueSetPreExpansionStatusEnum theExpansionStatus) { myExpansionStatus = theExpansionStatus; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetExpansionStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetExpansionStatusEnum.java deleted file mode 100644 index 33cb4301097..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetExpansionStatusEnum.java +++ /dev/null @@ -1,42 +0,0 @@ -package ca.uhn.fhir.jpa.entity; - -/* - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2019 University Health Network - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * This enum is used to indicate the expansion status of a given ValueSet in the terminology tables. In this context, - * an expanded ValueSet has its included concepts stored in the terminology tables as well. 
- */ -public enum TermValueSetExpansionStatusEnum { - - /** - * This status indicates the ValueSet is waiting to be picked up and expanded by a scheduled task. - */ - NOT_EXPANDED, - /** - * This status indicates the ValueSet has been picked up by a scheduled task and is mid-expansion. - */ - EXPANSION_IN_PROGRESS, - /** - * This status indicates the ValueSet has been picked up by a scheduled task and expansion is complete. - */ - EXPANDED - -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java new file mode 100644 index 00000000000..d5158cfa86c --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java @@ -0,0 +1,69 @@ +package ca.uhn.fhir.jpa.entity; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * This enum is used to indicate the pre-expansion status of a given ValueSet in the terminology tables. In this context, + * an expanded ValueSet has its included concepts stored in the terminology tables as well. + */ +public enum TermValueSetPreExpansionStatusEnum { + /** + * Sorting agnostic. 
+ */ + NOT_EXPANDED("notExpanded"), + EXPANSION_IN_PROGRESS("expansionInProgress"), + EXPANDED("expanded"); + + private static Map ourValues; + private String myCode; + + private TermValueSetPreExpansionStatusEnum(String theCode) { + myCode = theCode; + } + + public String getCode() { + return myCode; + } + + public static TermValueSetPreExpansionStatusEnum fromCode(String theCode) { + if (ourValues == null) { + HashMap values = new HashMap(); + for (TermValueSetPreExpansionStatusEnum next : values()) { + values.put(next.getCode(), next); + } + ourValues = Collections.unmodifiableMap(values); + } + return ourValues.get(theCode); + } + + /** + * Convert from Enum ordinal to Enum type. + * + * Usage: + * + * TermValueSetExpansionStatusEnum termValueSetExpansionStatusEnum = TermValueSetExpansionStatusEnum.values[ordinal]; + */ + public static final TermValueSetPreExpansionStatusEnum values[] = values(); +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java index bf1b6f0bf2d..db92f054233 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java @@ -498,9 +498,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, TermValueSet termValueSet = optionalTermValueSet.get(); - if (TermValueSetExpansionStatusEnum.EXPANDED != termValueSet.getExpansionStatus()) { - throw new UnprocessableEntityException("ValueSet is not ready for expansion; current status: " + termValueSet.getExpansionStatus()); - } + validatePreExpansionStatusOfValueSetOrThrowException(termValueSet.getExpansionStatus()); ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent(); expansionComponent.setIdentifier(UUID.randomUUID().toString()); @@ -515,6 
+513,20 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, return valueSet; } + private void validatePreExpansionStatusOfValueSetOrThrowException(TermValueSetPreExpansionStatusEnum thePreExpansionStatus) { + if (TermValueSetPreExpansionStatusEnum.EXPANDED != thePreExpansionStatus) { + String statusMsg = myContext.getLocalizer().getMessage( + TermValueSetPreExpansionStatusEnum.class, + thePreExpansionStatus.getCode()); + String msg = myContext.getLocalizer().getMessage( + BaseHapiTerminologySvcImpl.class, + "valueSetNotReadyForExpand", + thePreExpansionStatus.name(), + statusMsg); + throw new UnprocessableEntityException(msg); + } + } + private void populateExpansionComponent(ValueSet.ValueSetExpansionComponent theExpansionComponent, TermValueSet theTermValueSet, int theOffset, int theCount) { int total = myValueSetConceptDao.countByTermValueSetId(theTermValueSet.getId()); theExpansionComponent.setTotal(total); @@ -815,11 +827,34 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, StopWatch sw = new StopWatch(); AtomicInteger count = new AtomicInteger(0); - for (Object next : jpaQuery.getResultList()) { - count.incrementAndGet(); - TermConcept concept = (TermConcept) next; - addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, theCodeCounter); - } + int maxResultsPerBatch = 10000; + jpaQuery.setMaxResults(maxResultsPerBatch); + jpaQuery.setFirstResult(0); + + ourLog.info("Beginning batch expansion for {} with max results per batch: {}", (theAdd ? 
"inclusion" : "exclusion"), maxResultsPerBatch); + + do { + StopWatch swForBatch = new StopWatch(); + AtomicInteger countForBatch = new AtomicInteger(0); + + List resultList = jpaQuery.getResultList(); + int resultsInBatch = jpaQuery.getResultSize(); + int firstResult = jpaQuery.getFirstResult(); + for (Object next : resultList) { + count.incrementAndGet(); + countForBatch.incrementAndGet(); + TermConcept concept = (TermConcept) next; + addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, theCodeCounter); + } + + ourLog.info("Batch expansion for {} with starting index of {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), firstResult, countForBatch, swForBatch.getMillis()); + + if (resultsInBatch < maxResultsPerBatch) { + break; + } else { + jpaQuery.setFirstResult(firstResult + maxResultsPerBatch); + } + } while (true); ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), count, sw.getMillis()); @@ -1280,28 +1315,28 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, @Transactional(propagation = Propagation.NEVER) @Override public synchronized void saveDeferred() { - if (!myProcessDeferred) { + if (isProcessDeferredPaused()) { return; - } else if (myDeferredConcepts.isEmpty() && myConceptLinksToSaveLater.isEmpty()) { + } else if (isNoDeferredConceptsAndNoConceptLinksToSaveLater()) { processReindexing(); } TransactionTemplate tt = new TransactionTemplate(myTransactionMgr); tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); - if (!myDeferredConcepts.isEmpty() || !myConceptLinksToSaveLater.isEmpty()) { + if (isDeferredConceptsOrConceptLinksToSaveLater()) { tt.execute(t -> { processDeferredConcepts(); return null; }); } - if (myDeferredValueSets.size() > 0) { + if (isDeferredValueSets()) { tt.execute(t -> { processDeferredValueSets(); return null; }); } - if (myDeferredConceptMaps.size() > 0) { + if 
(isDeferredConceptMaps()) { tt.execute(t -> { processDeferredConceptMaps(); return null; @@ -1310,6 +1345,42 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } + private boolean isProcessDeferredPaused() { + return !myProcessDeferred; + } + + private boolean isNoDeferredConceptsAndNoConceptLinksToSaveLater() { + return isNoDeferredConcepts() && isNoConceptLinksToSaveLater(); + } + + private boolean isDeferredConceptsOrConceptLinksToSaveLater() { + return isDeferredConcepts() || isConceptLinksToSaveLater(); + } + + private boolean isDeferredConcepts() { + return !myDeferredConcepts.isEmpty(); + } + + private boolean isNoDeferredConcepts() { + return myDeferredConcepts.isEmpty(); + } + + private boolean isConceptLinksToSaveLater() { + return !myConceptLinksToSaveLater.isEmpty(); + } + + private boolean isNoConceptLinksToSaveLater() { + return myConceptLinksToSaveLater.isEmpty(); + } + + private boolean isDeferredValueSets() { + return !myDeferredValueSets.isEmpty(); + } + + private boolean isDeferredConceptMaps() { + return !myDeferredConceptMaps.isEmpty(); + } + @Override public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException { myApplicationContext = theApplicationContext; @@ -1657,6 +1728,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, @Scheduled(fixedDelay = 60000) // FIXME: DM 2019-08-19 - Remove this! 
@Override public synchronized void preExpandValueSetToTerminologyTables() { + if (isNotSafeToPreExpandValueSets()) { + ourLog.info("Skipping scheduled pre-expansion of ValueSets while deferred entities are being loaded."); + return; + } new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus theStatus) { @@ -1665,14 +1740,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, Optional optionalTermValueSet = getNextTermValueSetNotExpanded(); if (optionalTermValueSet.isPresent()) { TermValueSet termValueSet = optionalTermValueSet.get(); - termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANSION_IN_PROGRESS); + termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS); myValueSetDao.saveAndFlush(termValueSet); ValueSet valueSet = getValueSetFromResourceTable(termValueSet.getResource()); expandValueSet(valueSet, new ValueSetConceptAccumulator(termValueSet, myValueSetConceptDao, myValueSetConceptDesignationDao)); - termValueSet.setExpansionStatus(TermValueSetExpansionStatusEnum.EXPANDED); + termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANDED); myValueSetDao.saveAndFlush(termValueSet); } else { hasNextTermValueSetNotExpanded = false; @@ -1682,11 +1757,39 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, }); } + private boolean isNotSafeToPreExpandValueSets() { + return !isSafeToPreExpandValueSets(); + } + + private boolean isSafeToPreExpandValueSets() { + if (isProcessDeferredPaused()) { + return false; + } + + if (isDeferredConcepts()) { + return false; + } + + if (isConceptLinksToSaveLater()) { + return false; + } + + if (isDeferredValueSets()) { + return false; + } + + if (isDeferredConceptMaps()) { + return false; + } + + return true; + } + protected abstract ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable); 
private Optional getNextTermValueSetNotExpanded() { Optional retVal = Optional.empty(); - Slice page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetExpansionStatusEnum.NOT_EXPANDED); + Slice page = myValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetPreExpansionStatusEnum.NOT_EXPANDED); if (!page.getContent().isEmpty()) { retVal = Optional.of(page.getContent().get(0)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java index 4d74d1ed24d..e70c4d41f1d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java @@ -103,10 +103,10 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { } myValueSetConceptDao.save(concept); - if (myConceptsSaved++ % 250 == 0) { +// if (myConceptsSaved++ % 2 == 0) { // FIXME: DM 2019-08-23 - Reset to 250. ourLog.info("Have pre-expanded {} concepts in ValueSet[{}]", myConceptsSaved, myTermValueSet.getUrl()); - myValueSetConceptDao.flush(); - } +// myValueSetConceptDao.flush(); +// } return concept; } @@ -128,7 +128,7 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { designation.setValue(theDesignation.getValue()); myValueSetConceptDesignationDao.save(designation); - if (myDesignationsSaved++ % 250 == 0) { + if (myDesignationsSaved++ % 2 == 0) { // FIXME: DM 2019-08-23 - Reset to 250. 
ourLog.info("Have pre-expanded {} designations in ValueSet[{}]", myDesignationsSaved, myTermValueSet.getUrl()); myValueSetConceptDesignationDao.flush(); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index c80329d070b..22a42682d9e 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -606,6 +606,14 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { loadAndPersistCodeSystem(); } + @Test + public void testTest() { + ourLog.info("as is: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS); + ourLog.info("toString: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.toString()); + ourLog.info("name: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.name()); + ourLog.info("getCode: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.getCode()); + } + @Test public void testDuplicateConceptMapUrls() { createAndPersistConceptMap(); @@ -1296,7 +1304,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(0, termValueSet.getConcepts().size()); - assertEquals(TermValueSetExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus()); + assertEquals(TermValueSetPreExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus()); }); myTermSvc.preExpandValueSetToTerminologyTables(); @@ -1314,7 +1322,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); 
assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(codeSystem.getConcept().size(), termValueSet.getConcepts().size()); - assertEquals(TermValueSetExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); + assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); TermValueSetConcept concept = termValueSet.getConcepts().get(0); ourLog.info("Code:\n" + concept.toString()); @@ -1394,7 +1402,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(0, termValueSet.getConcepts().size()); - assertEquals(TermValueSetExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus()); + assertEquals(TermValueSetPreExpansionStatusEnum.NOT_EXPANDED, termValueSet.getExpansionStatus()); }); myTermSvc.preExpandValueSetToTerminologyTables(); @@ -1412,7 +1420,7 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName()); assertEquals(codeSystem.getConcept().size() - 2, termValueSet.getConcepts().size()); - assertEquals(TermValueSetExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); + assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); TermValueSetConcept concept = termValueSet.getConcepts().get(0); ourLog.info("Code:\n" + concept.toString()); diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index 
8dae8204771..c8e9278f13a 100644 --- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -79,6 +79,14 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { resVerProv.addIndex("IDX_RESVERPROV_SOURCEURI").unique(false).withColumns("SOURCE_URI"); resVerProv.addIndex("IDX_RESVERPROV_REQUESTID").unique(false).withColumns("REQUEST_ID"); + // TermValueSetConceptDesignation + version.startSectionWithMessage("Processing table: TRM_VALUESET_C_DESIGNATION"); + Builder.BuilderWithTableName termValueSetConceptDesignationTable = version.onTable("TRM_VALUESET_C_DESIGNATION"); + termValueSetConceptDesignationTable.addColumn("VALUESET_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG); + termValueSetConceptDesignationTable + .addForeignKey("FK_TRM_VSCD_VS_PID") + .toColumn("VALUESET_PID") + .references("TRM_VALUESET", "PID"); } protected void init400() { From 10958a8e4d00d6f46c43c058d70e5bc9cadaf474 Mon Sep 17 00:00:00 2001 From: James Agnew Date: Fri, 23 Aug 2019 12:22:27 -0400 Subject: [PATCH 12/23] I think this might work now.. --- .../TermValueSetPreExpansionStatusEnum.java | 4 +- .../jpa/term/BaseHapiTerminologySvcImpl.java | 144 ++++++++++++------ 2 files changed, 97 insertions(+), 51 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java index d5158cfa86c..f8a2cb673df 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java @@ -32,9 +32,11 @@ public enum TermValueSetPreExpansionStatusEnum { /** * Sorting agnostic. 
*/ + // FIXME: add a unit test that verifies a message exists for each code NOT_EXPANDED("notExpanded"), EXPANSION_IN_PROGRESS("expansionInProgress"), - EXPANDED("expanded"); + EXPANDED("expanded"), + FAILED_TO_EXPAND("failedToExpand"); private static Map ourValues; private String myCode; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java index db92f054233..f8ddae24886 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java @@ -618,6 +618,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, expandValueSet(theValueSetToExpand, theValueSetCodeAccumulator, new AtomicInteger(0)); } + @SuppressWarnings("ConstantConditions") private void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator, AtomicInteger theCodeCounter) { Set addedCodes = new HashSet<>(); @@ -625,16 +626,32 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, ourLog.debug("Handling includes"); for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) { ourLog.info("Working with " + identifyValueSetForLogging(theValueSetToExpand)); - boolean add = true; - expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter); + for (int i = 0; ; i++) { + int finalI = i; + boolean shouldContinue = myTxTemplate.execute(t -> { + boolean add = true; + return expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter, finalI); + }); + if (!shouldContinue) { + break; + } + } } // Handle excludes ourLog.debug("Handling excludes"); for (ValueSet.ConceptSetComponent exclude : 
theValueSetToExpand.getCompose().getExclude()) { ourLog.info("Working with " + identifyValueSetForLogging(theValueSetToExpand)); - boolean add = false; - expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, exclude, add, theCodeCounter); + for (int i = 0; ; i++) { + int finalI = i; + boolean shouldContinue = myTxTemplate.execute(t -> { + boolean add = false; + return expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, exclude, add, theCodeCounter, finalI); + }); + if (!shouldContinue) { + break; + } + } } } @@ -685,7 +702,11 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, return retVal; } - private void expandValueSetHandleIncludeOrExclude(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, boolean theAdd, AtomicInteger theCodeCounter) { + /** + * @return Returns true if there are potentially more results to return + */ + private boolean expandValueSetHandleIncludeOrExclude(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, boolean theAdd, AtomicInteger theCodeCounter, int theQueryIndex) { + String system = theInclude.getSystem(); boolean hasSystem = isNotBlank(system); boolean hasValueSet = theInclude.getValueSet().size() > 0; @@ -695,6 +716,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system); if (cs != null) { + TermCodeSystemVersion csv = cs.getCurrentVersion(); FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager); @@ -704,7 +726,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, */ if (myFulltextSearchSvc == null) { expandWithoutHibernateSearch(theValueSetCodeAccumulator, theAddedCodes, theInclude, system, theAdd, theCodeCounter); - return; + return false; } /* @@ -829,34 
+851,31 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, int maxResultsPerBatch = 10000; jpaQuery.setMaxResults(maxResultsPerBatch); - jpaQuery.setFirstResult(0); + jpaQuery.setFirstResult(theQueryIndex * maxResultsPerBatch); ourLog.info("Beginning batch expansion for {} with max results per batch: {}", (theAdd ? "inclusion" : "exclusion"), maxResultsPerBatch); - do { - StopWatch swForBatch = new StopWatch(); - AtomicInteger countForBatch = new AtomicInteger(0); + StopWatch swForBatch = new StopWatch(); + AtomicInteger countForBatch = new AtomicInteger(0); - List resultList = jpaQuery.getResultList(); - int resultsInBatch = jpaQuery.getResultSize(); - int firstResult = jpaQuery.getFirstResult(); - for (Object next : resultList) { - count.incrementAndGet(); - countForBatch.incrementAndGet(); - TermConcept concept = (TermConcept) next; - addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, theCodeCounter); - } + List resultList = jpaQuery.getResultList(); + int resultsInBatch = jpaQuery.getResultSize(); + int firstResult = jpaQuery.getFirstResult(); + for (Object next : resultList) { + count.incrementAndGet(); + countForBatch.incrementAndGet(); + TermConcept concept = (TermConcept) next; + addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, theCodeCounter); + } - ourLog.info("Batch expansion for {} with starting index of {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), firstResult, countForBatch, swForBatch.getMillis()); + ourLog.info("Batch expansion for {} with starting index of {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), firstResult, countForBatch, swForBatch.getMillis()); - if (resultsInBatch < maxResultsPerBatch) { - break; - } else { - jpaQuery.setFirstResult(firstResult + maxResultsPerBatch); - } - } while (true); - - ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? 
"inclusion" : "exclusion"), count, sw.getMillis()); + if (resultsInBatch < maxResultsPerBatch) { + ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), count, sw.getMillis()); + return false; + } else { + return true; + } } else { // No codesystem matching the URL found in the database @@ -886,8 +905,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, addConceptsToList(theValueSetCodeAccumulator, theAddedCodes, system, concept, theAdd); } + return false; } } else if (hasValueSet) { + for (CanonicalType nextValueSet : theInclude.getValueSet()) { ourLog.info("Starting {} expansion around ValueSet URI: {}", (theAdd ? "inclusion" : "exclusion"), nextValueSet.getValueAsString()); @@ -908,9 +929,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } } + + return false; + } else { throw new InvalidRequestException("ValueSet contains " + (theAdd ? "include" : "exclude") + " criteria with no system defined"); } + + } private void expandWithoutHibernateSearch(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, String theSystem, boolean theAdd, AtomicInteger theCodeCounter) { @@ -974,7 +1000,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, */ TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_MANDATORY); - return txTemplate.execute(t->{ + return txTemplate.execute(t -> { TermCodeSystemVersion csv = findCurrentCodeSystemVersionForSystem(theCodeSystem); return myConceptDao.findByCodeSystemAndCode(csv, theCode); }); @@ -1269,7 +1295,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } else { return saveConcept(theConcept); } - + } /** @@ -1724,7 +1750,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, 
ourLog.info("Done storing TermConceptMap."); } -// @Scheduled(fixedDelay = 600000) // 10 minutes. + // @Scheduled(fixedDelay = 600000) // 10 minutes. @Scheduled(fixedDelay = 60000) // FIXME: DM 2019-08-19 - Remove this! @Override public synchronized void preExpandValueSetToTerminologyTables() { @@ -1732,29 +1758,47 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, ourLog.info("Skipping scheduled pre-expansion of ValueSets while deferred entities are being loaded."); return; } - new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { - @Override - protected void doInTransactionWithoutResult(TransactionStatus theStatus) { - boolean hasNextTermValueSetNotExpanded = true; - do { - Optional optionalTermValueSet = getNextTermValueSetNotExpanded(); - if (optionalTermValueSet.isPresent()) { - TermValueSet termValueSet = optionalTermValueSet.get(); - termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS); - myValueSetDao.saveAndFlush(termValueSet); + TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); - ValueSet valueSet = getValueSetFromResourceTable(termValueSet.getResource()); + while (true) { - expandValueSet(valueSet, new ValueSetConceptAccumulator(termValueSet, myValueSetConceptDao, myValueSetConceptDesignationDao)); + TermValueSet valueSetToExpand = txTemplate.execute(t -> { + Optional optionalTermValueSet = getNextTermValueSetNotExpanded(); + if (optionalTermValueSet.isPresent() == false) { + return null; + } - termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANDED); - myValueSetDao.saveAndFlush(termValueSet); - } else { - hasNextTermValueSetNotExpanded = false; - } - } while (hasNextTermValueSetNotExpanded); + TermValueSet termValueSet = optionalTermValueSet.get(); + termValueSet.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS); + return myValueSetDao.saveAndFlush(termValueSet); + }); + if 
(valueSetToExpand == null) { + return; } - }); + + // Ok so we have a VS to expand + try { + ValueSet valueSet = txTemplate.execute(t -> getValueSetFromResourceTable(valueSetToExpand.getResource())); + expandValueSet(valueSet, new ValueSetConceptAccumulator(valueSetToExpand, myValueSetConceptDao, myValueSetConceptDesignationDao)); + + // We're done with this guy + txTemplate.execute(t -> { + valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANDED); + myValueSetDao.saveAndFlush(valueSetToExpand); + return null; + }); + + } catch (Exception e) { + ourLog.error("Failed to expand valueset: " + e.getMessage(), e); + txTemplate.execute(t -> { + valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.FAILED_TO_EXPAND); + myValueSetDao.saveAndFlush(valueSetToExpand); + return null; + }); + } + } + + } private boolean isNotSafeToPreExpandValueSets() { From b41926307cdf699631ab2e48ca7cacc2cee093a9 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Fri, 23 Aug 2019 17:40:45 -0400 Subject: [PATCH 13/23] Completed work on large ValueSet expansion support; ready for review. 
--- .../ca/uhn/fhir/i18n/hapi-messages.properties | 1 + .../java/ca/uhn/fhir/jpa/dao/DaoConfig.java | 2 +- .../TermValueSetPreExpansionStatusEnum.java | 4 +- .../jpa/term/BaseHapiTerminologySvcImpl.java | 51 ++++++++++--------- .../jpa/term/ValueSetConceptAccumulator.java | 10 ++-- ...ermValueSetPreExpansionStatusEnumTest.java | 21 ++++++++ 6 files changed, 56 insertions(+), 33 deletions(-) create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnumTest.java diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties index a8765577f54..8d9373ef09b 100644 --- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties +++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties @@ -134,3 +134,4 @@ ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.notExpanded=The ValueSet is waiting to be picked up and pre-expanded by a scheduled task. ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expansionInProgress=The ValueSet has been picked up by a scheduled task and pre-expansion is in progress. ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.expanded=The ValueSet has been picked up by a scheduled task and pre-expansion is complete. +ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum.failedToExpand=The ValueSet has been picked up by a scheduled task and pre-expansion has failed. 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java index b7456bd3dd0..7a7f588a72f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoConfig.java @@ -149,7 +149,7 @@ public class DaoConfig { /** * EXPERIMENTAL - Do not use in production! Do not change default of {@code false}! */ - private boolean myPreExpandValueSetsExperimental = true; // FIXME: DM 2019-08-19 - Return to false; + private boolean myPreExpandValueSetsExperimental = false; private boolean myFilterParameterEnabled = false; private StoreMetaSourceInformation myStoreMetaSourceInformation = StoreMetaSourceInformation.SOURCE_URI_AND_REQUEST_ID; /** diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java index f8a2cb673df..83e1e0af62e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java @@ -32,7 +32,7 @@ public enum TermValueSetPreExpansionStatusEnum { /** * Sorting agnostic. 
*/ - // FIXME: add a unit test that verifies a message exists for each code + NOT_EXPANDED("notExpanded"), EXPANSION_IN_PROGRESS("expansionInProgress"), EXPANDED("expanded"), @@ -41,7 +41,7 @@ public enum TermValueSetPreExpansionStatusEnum { private static Map ourValues; private String myCode; - private TermValueSetPreExpansionStatusEnum(String theCode) { + TermValueSetPreExpansionStatusEnum(String theCode) { myCode = theCode; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java index f8ddae24886..9ce1362b4fd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java @@ -351,7 +351,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, if (optionalExistingTermConceptMapById.isPresent()) { TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get(); - ourLog.info("Deleting existing TermConceptMap {} and its children...", existingTermConceptMap.getId()); + ourLog.info("Deleting existing TermConceptMap[{}] and its children...", existingTermConceptMap.getId()); for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) { for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) { @@ -368,7 +368,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId()); - ourLog.info("Done deleting existing TermConceptMap {} and its children.", existingTermConceptMap.getId()); + ourLog.info("Done deleting existing TermConceptMap[{}] and its children.", existingTermConceptMap.getId()); ourLog.info("Flushing..."); myConceptMapGroupElementTargetDao.flush(); @@ -392,11 +392,11 @@ public abstract class 
BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, if (optionalExistingTermValueSetById.isPresent()) { TermValueSet existingTermValueSet = optionalExistingTermValueSetById.get(); - ourLog.info("Deleting existing TermValueSet {} and its children...", existingTermValueSet.getId()); + ourLog.info("Deleting existing TermValueSet[{}] and its children...", existingTermValueSet.getId()); myValueSetConceptDesignationDao.deleteByTermValueSetId(existingTermValueSet.getId()); myValueSetConceptDao.deleteByTermValueSetId(existingTermValueSet.getId()); myValueSetDao.deleteByTermValueSetId(existingTermValueSet.getId()); - ourLog.info("Done deleting existing TermValueSet {} and its children.", existingTermValueSet.getId()); + ourLog.info("Done deleting existing TermValueSet[{}] and its children.", existingTermValueSet.getId()); ourLog.info("Flushing..."); myValueSetConceptDesignationDao.flush(); @@ -622,13 +622,16 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, private void expandValueSet(ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator, AtomicInteger theCodeCounter) { Set addedCodes = new HashSet<>(); + StopWatch sw = new StopWatch(); + String valueSetInfo = getValueSetInfo(theValueSetToExpand); + ourLog.info("Working with {}", valueSetInfo); + // Handle includes ourLog.debug("Handling includes"); for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) { - ourLog.info("Working with " + identifyValueSetForLogging(theValueSetToExpand)); for (int i = 0; ; i++) { int finalI = i; - boolean shouldContinue = myTxTemplate.execute(t -> { + Boolean shouldContinue = myTxTemplate.execute(t -> { boolean add = true; return expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, include, add, theCodeCounter, finalI); }); @@ -641,10 +644,9 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, // Handle excludes 
ourLog.debug("Handling excludes"); for (ValueSet.ConceptSetComponent exclude : theValueSetToExpand.getCompose().getExclude()) { - ourLog.info("Working with " + identifyValueSetForLogging(theValueSetToExpand)); for (int i = 0; ; i++) { int finalI = i; - boolean shouldContinue = myTxTemplate.execute(t -> { + Boolean shouldContinue = myTxTemplate.execute(t -> { boolean add = false; return expandValueSetHandleIncludeOrExclude(theValueSetCodeAccumulator, addedCodes, exclude, add, theCodeCounter, finalI); }); @@ -653,9 +655,11 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } } } + + ourLog.info("Done working with {} in {}ms", valueSetInfo, sw.getMillis()); } - private String identifyValueSetForLogging(ValueSet theValueSet) { + private String getValueSetInfo(ValueSet theValueSet) { StringBuilder sb = new StringBuilder(); boolean isIdentified = false; sb @@ -703,16 +707,16 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } /** - * @return Returns true if there are potentially more results to return + * @return Returns true if there are potentially more results to process. */ - private boolean expandValueSetHandleIncludeOrExclude(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, boolean theAdd, AtomicInteger theCodeCounter, int theQueryIndex) { + private Boolean expandValueSetHandleIncludeOrExclude(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, boolean theAdd, AtomicInteger theCodeCounter, int theQueryIndex) { String system = theInclude.getSystem(); boolean hasSystem = isNotBlank(system); boolean hasValueSet = theInclude.getValueSet().size() > 0; if (hasSystem) { - ourLog.info("Starting {} expansion around code system: {}", (theAdd ? "inclusion" : "exclusion"), system); + ourLog.info("Starting {} expansion around CodeSystem: {}", (theAdd ? 
"inclusion" : "exclusion"), system); TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system); if (cs != null) { @@ -859,7 +863,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, AtomicInteger countForBatch = new AtomicInteger(0); List resultList = jpaQuery.getResultList(); - int resultsInBatch = jpaQuery.getResultSize(); + int resultsInBatch = resultList.size(); int firstResult = jpaQuery.getFirstResult(); for (Object next : resultList) { count.incrementAndGet(); @@ -910,7 +914,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } else if (hasValueSet) { for (CanonicalType nextValueSet : theInclude.getValueSet()) { - ourLog.info("Starting {} expansion around ValueSet URI: {}", (theAdd ? "inclusion" : "exclusion"), nextValueSet.getValueAsString()); + ourLog.info("Starting {} expansion around ValueSet: {}", (theAdd ? "inclusion" : "exclusion"), nextValueSet.getValueAsString()); List expanded = expandValueSet(nextValueSet.getValueAsString()); for (VersionIndependentConcept nextConcept : expanded) { @@ -1750,8 +1754,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, ourLog.info("Done storing TermConceptMap."); } - // @Scheduled(fixedDelay = 600000) // 10 minutes. - @Scheduled(fixedDelay = 60000) // FIXME: DM 2019-08-19 - Remove this! + @Scheduled(fixedDelay = 600000) // 10 minutes. 
@Override public synchronized void preExpandValueSetToTerminologyTables() { if (isNotSafeToPreExpandValueSets()) { @@ -1761,10 +1764,9 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); while (true) { - TermValueSet valueSetToExpand = txTemplate.execute(t -> { Optional optionalTermValueSet = getNextTermValueSetNotExpanded(); - if (optionalTermValueSet.isPresent() == false) { + if (!optionalTermValueSet.isPresent()) { return null; } @@ -1776,12 +1778,15 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, return; } - // Ok so we have a VS to expand + // We have a ValueSet to pre-expand. try { - ValueSet valueSet = txTemplate.execute(t -> getValueSetFromResourceTable(valueSetToExpand.getResource())); + ValueSet valueSet = txTemplate.execute(t -> { + TermValueSet refreshedValueSetToExpand = myValueSetDao.findById(valueSetToExpand.getId()).get(); + return getValueSetFromResourceTable(refreshedValueSetToExpand.getResource()); + }); expandValueSet(valueSet, new ValueSetConceptAccumulator(valueSetToExpand, myValueSetConceptDao, myValueSetConceptDesignationDao)); - // We're done with this guy + // We are done with this ValueSet. 
txTemplate.execute(t -> { valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANDED); myValueSetDao.saveAndFlush(valueSetToExpand); @@ -1789,7 +1794,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, }); } catch (Exception e) { - ourLog.error("Failed to expand valueset: " + e.getMessage(), e); + ourLog.error("Failed to pre-expand ValueSet: " + e.getMessage(), e); txTemplate.execute(t -> { valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.FAILED_TO_EXPAND); myValueSetDao.saveAndFlush(valueSetToExpand); @@ -1797,8 +1802,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, }); } } - - } private boolean isNotSafeToPreExpandValueSets() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java index e70c4d41f1d..9c3f6a0a3f3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java @@ -103,10 +103,9 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { } myValueSetConceptDao.save(concept); -// if (myConceptsSaved++ % 2 == 0) { // FIXME: DM 2019-08-23 - Reset to 250. + if (myConceptsSaved++ % 250 == 0) { // TODO: DM 2019-08-23 - This message never appears in the log. Fix it! ourLog.info("Have pre-expanded {} concepts in ValueSet[{}]", myConceptsSaved, myTermValueSet.getUrl()); -// myValueSetConceptDao.flush(); -// } + } return concept; } @@ -128,9 +127,8 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { designation.setValue(theDesignation.getValue()); myValueSetConceptDesignationDao.save(designation); - if (myDesignationsSaved++ % 2 == 0) { // FIXME: DM 2019-08-23 - Reset to 250. 
- ourLog.info("Have pre-expanded {} designations in ValueSet[{}]", myDesignationsSaved, myTermValueSet.getUrl()); - myValueSetConceptDesignationDao.flush(); + if (myDesignationsSaved++ % 250 == 0) { // TODO: DM 2019-08-23 - This message never appears in the log. Fix it! + ourLog.info("Have pre-expanded {} designations for Concept[{}|{}] in ValueSet[{}]", myDesignationsSaved, theConcept.getSystem(), theConcept.getCode(), myTermValueSet.getUrl()); } return designation; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnumTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnumTest.java new file mode 100644 index 00000000000..a9b8f365ca4 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnumTest.java @@ -0,0 +1,21 @@ +package ca.uhn.fhir.jpa.entity; + +import ca.uhn.fhir.i18n.HapiLocalizer; +import org.junit.Test; + +import static org.junit.Assert.fail; + +public class TermValueSetPreExpansionStatusEnumTest { + @Test + public void testHaveDescriptions() { + HapiLocalizer localizer = new HapiLocalizer(); + + for (TermValueSetPreExpansionStatusEnum next : TermValueSetPreExpansionStatusEnum.values()) { + String key = "ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum." + next.getCode(); + String msg = localizer.getMessage(key); + if (msg.equals(HapiLocalizer.UNKNOWN_I18N_KEY_MESSAGE)) { + fail("No value for key: " + key); + } + } + } +} From 3f14a7a68be14203bdb95374894b5f2be818461d Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Mon, 26 Aug 2019 09:54:05 -0400 Subject: [PATCH 14/23] Added changelog entry for switch from Derby to H2 for HAPI FHIR CLI server. 
--- src/changes/changes.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index fc505a69c21..f91b8a758aa 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -73,6 +73,11 @@ The informational message returned in an OperationOutcome when a delete failed due to cascades not being enabled contained an incorrect example. This has been corrected. + + The HAPI FHIR CLI server now uses H2 as its database platform instead of Derby. + Note that this means that data in any existing installations will need to be + re-uploaded to the new database platform. + From 073d2a45a4b40223aeae3177eb7807dc299f226d Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Mon, 26 Aug 2019 14:26:38 -0400 Subject: [PATCH 15/23] Multiaxial hierarchy support is implemented; we now populate parent and child properties. --- .../jpa/term/loinc/LoincHierarchyHandler.java | 13 +++ .../term/TerminologyLoaderSvcLoincTest.java | 108 +++++++++++++++++- 2 files changed, 120 insertions(+), 1 deletion(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHierarchyHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHierarchyHandler.java index 0ece9206e5c..b5474f283eb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHierarchyHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHierarchyHandler.java @@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.term.loinc; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; +import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc; import ca.uhn.fhir.jpa.term.IRecordHandler; import org.apache.commons.csv.CSVRecord; @@ -52,6 +53,18 @@ public class LoincHierarchyHandler implements IRecordHandler { TermConcept child = getOrCreate(childCode, childCodeText); 
parent.addChild(child, TermConceptParentChildLink.RelationshipTypeEnum.ISA); + + parent.addPropertyCoding( + "child", + IHapiTerminologyLoaderSvc.LOINC_URI, + child.getCode(), + child.getDisplay()); + + child.addPropertyCoding( + "parent", + IHapiTerminologyLoaderSvc.LOINC_URI, + parent.getCode(), + parent.getDisplay()); } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java index ecd67f4a724..d2d3dba5d71 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.entity.TermConcept; +import ca.uhn.fhir.jpa.entity.TermConceptProperty; import ca.uhn.fhir.jpa.term.loinc.*; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; @@ -19,6 +20,8 @@ import org.mockito.Captor; import org.mockito.Mock; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.*; @@ -336,7 +339,6 @@ public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest { } } - public static void addLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles) throws IOException { theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UPLOAD_PROPERTIES_FILE); theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_FILE); @@ -358,6 +360,110 @@ public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest { theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE); } + @Test + public void testLoadLoincMultiaxialHierarchySupport() throws Exception { + addLoincMandatoryFilesToZip(myFiles); + + 
// Actually do the load + mySvc.loadLoinc(myFiles.getFiles(), mySrd); + + verify(myTermSvc, times(1)).storeNewCodeSystemVersion(mySystemCaptor.capture(), myCsvCaptor.capture(), any(RequestDetails.class), myValueSetsCaptor.capture(), myConceptMapCaptor.capture()); + Map concepts = extractConcepts(); + Map valueSets = extractValueSets(); + Map conceptMaps = extractConceptMaps(); + + ConceptMap conceptMap; + TermConcept code; + ValueSet vs; + ConceptMap.ConceptMapGroupComponent group; + + // Normal loinc code + code = concepts.get("10013-1"); + assertEquals("10013-1", code.getCode()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, code.getCodingProperties("PROPERTY").get(0).getSystem()); + assertEquals("LP6802-5", code.getCodingProperties("PROPERTY").get(0).getCode()); + assertEquals("Elpot", code.getCodingProperties("PROPERTY").get(0).getDisplay()); + assertEquals("EKG.MEAS", code.getStringProperty("CLASS")); + assertEquals("R' wave amplitude in lead I", code.getDisplay()); + + // Codes with parent and child properties + code = concepts.get("LP31755-9"); + assertEquals("LP31755-9", code.getCode()); + List properties = new ArrayList<>(code.getProperties()); + assertEquals(1, properties.size()); + assertEquals("child", properties.get(0).getKey()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(0).getCodeSystem()); + assertEquals("LP14559-6", properties.get(0).getValue()); + assertEquals("Microorganism", properties.get(0).getDisplay()); + assertEquals(0, code.getParents().size()); + assertEquals(1, code.getChildren().size()); + + TermConcept childCode = code.getChildren().get(0).getChild(); + assertEquals("LP14559-6", childCode.getCode()); + assertEquals("Microorganism", childCode.getDisplay()); + + properties = new ArrayList<>(childCode.getProperties()); + assertEquals(2, properties.size()); + assertEquals("parent", properties.get(0).getKey()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(0).getCodeSystem()); + 
assertEquals(code.getCode(), properties.get(0).getValue()); + assertEquals(code.getDisplay(), properties.get(0).getDisplay()); + assertEquals("child", properties.get(1).getKey()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(1).getCodeSystem()); + assertEquals("LP98185-9", properties.get(1).getValue()); + assertEquals("Bacteria", properties.get(1).getDisplay()); + assertEquals(1, childCode.getParents().size()); + assertEquals(1, childCode.getChildren().size()); + assertEquals(code.getCode(), new ArrayList<>(childCode.getParents()).get(0).getParent().getCode()); + + TermConcept nestedChildCode = childCode.getChildren().get(0).getChild(); + assertEquals("LP98185-9", nestedChildCode.getCode()); + assertEquals("Bacteria", nestedChildCode.getDisplay()); + + properties = new ArrayList<>(nestedChildCode.getProperties()); + assertEquals(2, properties.size()); + assertEquals("parent", properties.get(0).getKey()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(0).getCodeSystem()); + assertEquals(childCode.getCode(), properties.get(0).getValue()); + assertEquals(childCode.getDisplay(), properties.get(0).getDisplay()); + assertEquals("child", properties.get(1).getKey()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(1).getCodeSystem()); + assertEquals("LP14082-9", properties.get(1).getValue()); + assertEquals("Bacteria", properties.get(1).getDisplay()); + assertEquals(1, nestedChildCode.getParents().size()); + assertEquals(1, nestedChildCode.getChildren().size()); + assertEquals(childCode.getCode(), new ArrayList<>(nestedChildCode.getParents()).get(0).getParent().getCode()); + + TermConcept doublyNestedChildCode = nestedChildCode.getChildren().get(0).getChild(); + assertEquals("LP14082-9", doublyNestedChildCode.getCode()); + assertEquals("Bacteria", doublyNestedChildCode.getDisplay()); + + properties = new ArrayList<>(doublyNestedChildCode.getProperties()); + assertEquals(4, properties.size()); + 
assertEquals("parent", properties.get(0).getKey()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(0).getCodeSystem()); + assertEquals(nestedChildCode.getCode(), properties.get(0).getValue()); + assertEquals(nestedChildCode.getDisplay(), properties.get(0).getDisplay()); + assertEquals("child", properties.get(1).getKey()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(1).getCodeSystem()); + assertEquals("LP52258-8", properties.get(1).getValue()); + assertEquals("Bacteria | Body Fluid", properties.get(1).getDisplay()); + assertEquals("child", properties.get(2).getKey()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(2).getCodeSystem()); + assertEquals("LP52260-4", properties.get(2).getValue()); + assertEquals("Bacteria | Cerebral spinal fluid", properties.get(2).getDisplay()); + assertEquals("child", properties.get(3).getKey()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, properties.get(3).getCodeSystem()); + assertEquals("LP52960-9", properties.get(3).getValue()); + assertEquals("Bacteria | Cervix", properties.get(3).getDisplay()); + assertEquals(1, doublyNestedChildCode.getParents().size()); + assertEquals(3, doublyNestedChildCode.getChildren().size()); + assertEquals(nestedChildCode.getCode(), new ArrayList<>(doublyNestedChildCode.getParents()).get(0).getParent().getCode()); + assertEquals("LP52258-8", doublyNestedChildCode.getChildren().get(0).getChild().getCode()); + assertEquals("LP52260-4", doublyNestedChildCode.getChildren().get(1).getChild().getCode()); + assertEquals("LP52960-9", doublyNestedChildCode.getChildren().get(2).getChild().getCode()); + } + @AfterClass public static void afterClassClearContext() { TestUtil.clearAllStaticFieldsForUnitTest(); From b01d59d5a0ce40395da8c8241a0d2b1c25e66de1 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Mon, 26 Aug 2019 14:47:50 -0400 Subject: [PATCH 16/23] Added changelog entry. 
--- src/changes/changes.xml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index f91b8a758aa..00136dd85d2 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -78,6 +78,10 @@ Note that this means that data in any existing installations will need to be re-uploaded to the new database platform. + + LOINC concepts now include multiaxial hierarchical properties (e.g. parent]]> and + child]]>, which identify parent and child concepts. + From 0ee15874ca773e4afe36f011e4c481dcd8fc0b98 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Mon, 26 Aug 2019 16:42:37 -0400 Subject: [PATCH 17/23] loinc-all ValueSet is now created automatically when loading LOINC terminology. --- .../jpa/term/TerminologyLoaderSvcImpl.java | 20 +++++++++++++++++++ .../term/TerminologyLoaderSvcLoincTest.java | 16 +++++++++++++++ .../jpa/term/TerminologySvcImplR4Test.java | 9 +-------- src/changes/changes.xml | 5 +++++ 4 files changed, 42 insertions(+), 8 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java index 5c3ec5d20a8..af8110b43bb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java @@ -32,6 +32,7 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.ConceptMap; +import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.ValueSet; import org.springframework.beans.factory.annotation.Autowired; @@ -556,6 +557,8 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { IOUtils.closeQuietly(theDescriptors); + valueSets.add(getValueSetLoincAll()); + for (Entry next : 
code2concept.entrySet()) { TermConcept nextConcept = next.getValue(); if (nextConcept.getParents().isEmpty()) { @@ -573,6 +576,23 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { return new UploadStatistics(conceptCount, target); } + private ValueSet getValueSetLoincAll() { + ValueSet retVal = new ValueSet(); + + retVal.setId("loinc-all"); + retVal.setUrl("http://loinc.org/fhir/ValueSet/loinc-all"); + retVal.setVersion("1.0.0"); + retVal.setName("All LOINC codes"); + retVal.setStatus(Enumerations.PublicationStatus.ACTIVE); + retVal.setDate(new Date()); + retVal.setPublisher("Regenstrief Institute, Inc."); + retVal.setDescription("A value set that includes all LOINC codes"); + retVal.setCopyright("This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/"); + retVal.getCompose().addInclude().setSystem(IHapiTerminologyLoaderSvc.LOINC_URI); + + return retVal; + } + private UploadStatistics processSnomedCtFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) { final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion(); final Map id2concept = new HashMap<>(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java index d2d3dba5d71..60fced8db94 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java @@ -255,6 +255,22 @@ public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest { assertEquals("42176-8", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode()); assertEquals("1,3 beta glucan [Mass/volume] in Serum", 
vs.getCompose().getInclude().get(0).getConcept().get(0).getDisplay()); + // All LOINC codes + assertTrue(valueSets.containsKey("loinc-all")); + vs = valueSets.get("loinc-all"); + assertEquals("http://loinc.org/fhir/ValueSet/loinc-all", vs.getUrl()); + assertEquals("1.0.0", vs.getVersion()); + assertEquals("All LOINC codes", vs.getName()); + assertEquals(Enumerations.PublicationStatus.ACTIVE, vs.getStatus()); + assertTrue(vs.hasDate()); + assertEquals("Regenstrief Institute, Inc.", vs.getPublisher()); + assertEquals("A value set that includes all LOINC codes", vs.getDescription()); + assertEquals("This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/", vs.getCopyright()); + assertTrue(vs.hasCompose()); + assertTrue(vs.getCompose().hasInclude()); + assertEquals(1, vs.getCompose().getInclude().size()); + assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, vs.getCompose().getInclude().get(0).getSystem()); + // IEEE Medical Device Codes conceptMap = conceptMaps.get(LoincIeeeMedicalDeviceCodeHandler.LOINC_IEEE_CM_ID); ourLog.debug(FhirContext.forR4().newXmlParser().setPrettyPrint(true).encodeResourceToString(conceptMap)); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index 22a42682d9e..f87145ce9f1 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -29,6 +29,7 @@ import org.springframework.transaction.support.TransactionTemplate; import javax.annotation.Nonnull; import java.io.IOException; +import java.util.Date; import java.util.List; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; @@ -606,14 +607,6 @@ public class 
TerminologySvcImplR4Test extends BaseJpaR4Test { loadAndPersistCodeSystem(); } - @Test - public void testTest() { - ourLog.info("as is: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS); - ourLog.info("toString: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.toString()); - ourLog.info("name: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.name()); - ourLog.info("getCode: {}", TermValueSetPreExpansionStatusEnum.EXPANSION_IN_PROGRESS.getCode()); - } - @Test public void testDuplicateConceptMapUrls() { createAndPersistConceptMap(); diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 00136dd85d2..cabe1a4dd78 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -82,6 +82,11 @@ LOINC concepts now include multiaxial hierarchical properties (e.g. parent]]> and child]]>, which identify parent and child concepts. + + When loading LOINC terminology, a new ValueSet is automatically created with a single include element that + identifies the LOINC CodeSystem in ValueSet.compose.include.system]]>. This ValueSet + includes all LOINC codes. + From 07652540246d6e85036645f79972490c09b4082a Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Mon, 26 Aug 2019 18:32:21 -0400 Subject: [PATCH 18/23] Making LOINC filenames configurable; initial commit. 
--- .../jpa/term/TerminologyLoaderSvcImpl.java | 4 +-- .../fhir/jpa/term/loinc/BaseLoincHandler.java | 2 +- .../term/loinc/LoincAnswerListHandler.java | 2 +- .../resources/loinc/loincupload.properties | 32 +++++++++++++++++-- 4 files changed, 33 insertions(+), 7 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java index af8110b43bb..ca95641e4aa 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java @@ -472,13 +472,13 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { Properties uploadProperties = new Properties(); for (FileDescriptor next : theDescriptors.getUncompressedFileDescriptors()) { - if (next.getFilename().endsWith("loincupload.properties")) { + if (next.getFilename().endsWith(LOINC_UPLOAD_PROPERTIES_FILE)) { try { try (InputStream inputStream = next.getInputStream()) { uploadProperties.load(inputStream); } } catch (IOException e) { - throw new InternalErrorException("Failed to read loincupload.properties", e); + throw new InternalErrorException("Failed to read " + LOINC_UPLOAD_PROPERTIES_FILE, e); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java index 57ca69fd2d8..fcd788c35b6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java @@ -114,7 +114,7 @@ public abstract class BaseLoincHandler implements IRecordHandler { conceptMap.setId(theMapping.getConceptMapId()); conceptMap.setUrl(theMapping.getConceptMapUri()); 
conceptMap.setName(theMapping.getConceptMapName()); - conceptMap.setVersion(myUploadProperties.getProperty("conceptmap.version")); + conceptMap.setVersion(myUploadProperties.getProperty("loinc.conceptmap.version")); conceptMap.setPublisher(REGENSTRIEF_INSTITUTE_INC); conceptMap.addContact() .setName(REGENSTRIEF_INSTITUTE_INC) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java index b8e7dc20166..204609eb332 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java @@ -72,7 +72,7 @@ public class LoincAnswerListHandler extends BaseLoincHandler { } // Answer list ValueSet - ValueSet vs = getValueSet(answerListId, "http://loinc.org/vs/" + answerListId, answerListName, "answerlist.version"); + ValueSet vs = getValueSet(answerListId, "http://loinc.org/vs/" + answerListId, answerListName, "loinc.answerlist.version"); if (vs.getIdentifier().isEmpty()) { vs.addIdentifier() .setSystem("urn:ietf:rfc:3986") diff --git a/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties b/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties index 7a7aab2ab36..b442c7264bc 100644 --- a/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties +++ b/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties @@ -1,7 +1,33 @@ - +loinc.answerlist.file=AnswerList.csv +loinc.answerlist.link.file=LoincAnswerListLink.csv # This is the version identifier for the AnswerList file -answerlist.version=Beta.1 +loinc.answerlist.version=Beta.1 # This is the version identifier for uploaded ConceptMap resources -conceptmap.version=Beta.1 +loinc.conceptmap.version=Beta.1 +loinc.document.ontology.file=DocumentOntology.csv + 
+loinc.file=LoincTable/Loinc.csv + +loinc.group.file=Group.csv +loinc.group.terms.file=GroupLoincTerms.csv + +loinc.hierarchy.file=MultiAxialHierarchy.csv + +loinc.ieee.medical.device.code.mapping.table.csv=LoincIeeeMedicalDeviceCodeMappingTable.csv + +loinc.imaging.document.codes.file=ImagingDocumentCodes.csv + +loinc.parent.group.file=ParentGroup.csv + +loinc.part.file=Part.csv +loinc.part.link.file=LoincPartLink.csv +loinc.part.related.code.mapping.file=PartRelatedCodeMapping.csv + +loinc.rsna.playbook.file=LoincRsnaRadiologyPlaybook.csv + +loinc.top2000.common.lab.results.si.file=Top2000CommonLabResultsSi.csv +loinc.top2000.common.lab.results.us.file=Top2000CommonLabResultsUs.csv + +loinc.universal.lab.order.valueset.file=LoincUniversalLabOrdersValueSet.csv From f4789247b2acb512ddd854b769e31cabb6e267a4 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Mon, 26 Aug 2019 18:46:57 -0400 Subject: [PATCH 19/23] Making LOINC filenames configurable; fix IEEE medical device code property key. 
--- .../src/test/resources/loinc/loincupload.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties b/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties index b442c7264bc..3ee26623b47 100644 --- a/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties +++ b/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties @@ -15,7 +15,7 @@ loinc.group.terms.file=GroupLoincTerms.csv loinc.hierarchy.file=MultiAxialHierarchy.csv -loinc.ieee.medical.device.code.mapping.table.csv=LoincIeeeMedicalDeviceCodeMappingTable.csv +loinc.ieee.medical.device.code.mapping.table.file=LoincIeeeMedicalDeviceCodeMappingTable.csv loinc.imaging.document.codes.file=ImagingDocumentCodes.csv From 70d0a8aa8f2a3021af9a14ef576f95e8519321c0 Mon Sep 17 00:00:00 2001 From: James Agnew Date: Tue, 27 Aug 2019 05:42:15 -0400 Subject: [PATCH 20/23] Get patch working (#1449) --- ...ansactionProcessorVersionAdapterDstu3.java | 21 ++++++++ ...temProviderTransactionSearchDstu3Test.java | 52 +++++++++++++++++++ src/changes/changes.xml | 6 +++ 3 files changed, 79 insertions(+) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/TransactionProcessorVersionAdapterDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/TransactionProcessorVersionAdapterDstu3.java index 3398660898e..413f6e262ce 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/TransactionProcessorVersionAdapterDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/TransactionProcessorVersionAdapterDstu3.java @@ -21,19 +21,24 @@ package ca.uhn.fhir.jpa.dao.dstu3; */ import ca.uhn.fhir.jpa.dao.TransactionProcessor; +import ca.uhn.fhir.rest.api.PatchTypeEnum; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import 
ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.hl7.fhir.dstu3.model.Bundle; import org.hl7.fhir.dstu3.model.OperationOutcome; import org.hl7.fhir.dstu3.model.Resource; import org.hl7.fhir.dstu3.model.codesystems.IssueType; import org.hl7.fhir.exceptions.FHIRException; +import org.hl7.fhir.instance.model.api.IBaseBinary; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.hl7.fhir.instance.model.api.IBaseResource; import java.util.Date; import java.util.List; +import static org.apache.commons.lang3.StringUtils.isBlank; + public class TransactionProcessorVersionAdapterDstu3 implements TransactionProcessor.ITransactionProcessorVersionAdapter { @Override public void setResponseStatus(Bundle.BundleEntryComponent theBundleEntry, String theStatus) { @@ -106,6 +111,22 @@ public class TransactionProcessorVersionAdapterDstu3 implements TransactionProce if (value != null) { retVal = value.toCode(); } + + /* + * This is a workaround for the fact that PATCH isn't a valid constant for + * DSTU3 Bundle.entry.request.method (it was added in R4) + */ + if (isBlank(retVal)) { + if (theEntry.getResource() instanceof IBaseBinary) { + String contentType = ((IBaseBinary) theEntry.getResource()).getContentType(); + try { + PatchTypeEnum.forContentTypeOrThrowInvalidRequestException(contentType); + retVal = "PATCH"; + } catch (InvalidRequestException e) { + // ignore + } + } + } return retVal; } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java index 2e6d360908b..2b84ec5ed5c 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/SystemProviderTransactionSearchDstu3Test.java @@ -4,11 +4,19 @@ import static 
org.hamcrest.Matchers.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.concurrent.TimeUnit; +import ca.uhn.fhir.rest.api.Constants; +import com.google.common.base.Charsets; +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; @@ -180,6 +188,50 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test { assertThat(actualIds, contains(ids.subList(5, 10).toArray(new String[0]))); } + + @Test + public void testPatchUsingJsonPatch_Transaction() throws Exception { + String methodName = "testPatchUsingJsonPatch_Transaction"; + IIdType pid1; + { + Patient patient = new Patient(); + patient.setActive(true); + patient.addIdentifier().setSystem("urn:system").setValue("0"); + patient.addName().setFamily(methodName).addGiven("Joe"); + pid1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless(); + } + + String patchString = "[ { \"op\":\"replace\", \"path\":\"/active\", \"value\":false } ]"; + Binary patch = new Binary(); + patch.setContentType(ca.uhn.fhir.rest.api.Constants.CT_JSON_PATCH); + patch.setContent(patchString.getBytes(Charsets.UTF_8)); + + // Note that we don't set the type + Bundle input = new Bundle(); + input.setType(Bundle.BundleType.TRANSACTION); + input.addEntry() + .setFullUrl(pid1.getValue()) + .setResource(patch) + .getRequest().setUrl(pid1.getValue()); + + HttpPost post = new HttpPost(ourServerBase); + String encodedRequest = 
myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input); + ourLog.info("Requet:\n{}", encodedRequest); + post.setEntity(new StringEntity(encodedRequest, ContentType.parse(ca.uhn.fhir.rest.api.Constants.CT_FHIR_JSON_NEW+ Constants.CHARSET_UTF8_CTSUFFIX))); + try (CloseableHttpResponse response = ourHttpClient.execute(post)) { + String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); + ourLog.info(responseString); + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(responseString, containsString("\"resourceType\":\"Bundle\"")); + } + + Patient newPt = ourClient.read().resource(Patient.class).withId(pid1.getIdPart()).execute(); + assertEquals("2", newPt.getIdElement().getVersionIdPart()); + assertEquals(false, newPt.getActive()); + } + + + @Test public void testTransactionWithGetHardLimitLargeSynchronous() { List ids = create20Patients(); diff --git a/src/changes/changes.xml b/src/changes/changes.xml index cabe1a4dd78..64645a6b469 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -73,6 +73,12 @@ The informational message returned in an OperationOutcome when a delete failed due to cascades not being enabled contained an incorrect example. This has been corrected. + + It is now possible to submit a PATCH request as a part of a FHIR transaction in DSTU3 (previously this + was only supported in R4+). This is not officially part of the DSTU3 spec, but it can now be performed by + leaving the Bundle.entry.request.method blank in DSTU3 transactions and setting the request payload + as a Binary resource containing a valid patch. + The HAPI FHIR CLI server now uses H2 as its database platform instead of Derby. 
Note that this means that data in any existing installations will need to be From 1adfda44d95bc4ad9bd0e1f77c6725df3e6d5f85 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Tue, 27 Aug 2019 11:19:01 -0400 Subject: [PATCH 21/23] Making LOINC filenames configurable; ready for review. --- .../jpa/term/TerminologyLoaderSvcImpl.java | 187 +++++++++--------- .../fhir/jpa/term/loinc/BaseLoincHandler.java | 3 +- .../term/loinc/LoincAnswerListHandler.java | 6 +- .../term/loinc/LoincUploadPropertiesEnum.java | 142 +++++++++++++ .../TerminologyUploaderProviderDstu3Test.java | 39 ++-- .../term/TerminologyLoaderSvcLoincTest.java | 41 ++-- .../resources/loinc/loincupload.properties | 106 +++++++--- 7 files changed, 365 insertions(+), 159 deletions(-) create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java index ca95641e4aa..a36e326381b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java @@ -34,6 +34,7 @@ import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.Enumerations; import org.hl7.fhir.r4.model.ValueSet; +import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; import java.io.*; @@ -42,6 +43,7 @@ import java.util.Map.Entry; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*; import static org.apache.commons.lang3.StringUtils.isNotBlank; /* @@ -68,26 +70,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { public static final String SCT_FILE_CONCEPT = 
"Terminology/sct2_Concept_Full_"; public static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full-en"; public static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full"; + public static final String IMGTHLA_HLA_NOM_TXT = "hla_nom.txt"; public static final String IMGTHLA_HLA_XML = "hla.xml"; - public static final String LOINC_ANSWERLIST_FILE = "AnswerList.csv"; - public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink.csv"; - public static final String LOINC_DOCUMENT_ONTOLOGY_FILE = "DocumentOntology.csv"; - public static final String LOINC_UPLOAD_PROPERTIES_FILE = "loincupload.properties"; - public static final String LOINC_FILE = "LoincTable/Loinc.csv"; - public static final String LOINC_HIERARCHY_FILE = "MultiAxialHierarchy.csv"; - public static final String LOINC_PART_FILE = "Part.csv"; - public static final String LOINC_PART_LINK_FILE = "LoincPartLink.csv"; - public static final String LOINC_PART_RELATED_CODE_MAPPING_FILE = "PartRelatedCodeMapping.csv"; - public static final String LOINC_RSNA_PLAYBOOK_FILE = "LoincRsnaRadiologyPlaybook.csv"; - public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE = "Top2000CommonLabResultsUs.csv"; - public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE = "Top2000CommonLabResultsSi.csv"; - public static final String LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE = "LoincUniversalLabOrdersValueSet.csv"; - public static final String LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV = "LoincIeeeMedicalDeviceCodeMappingTable.csv"; - public static final String LOINC_IMAGING_DOCUMENT_CODES_FILE = "ImagingDocumentCodes.csv"; - public static final String LOINC_GROUP_FILE = "Group.csv"; - public static final String LOINC_GROUP_TERMS_FILE = "GroupLoincTerms.csv"; - public static final String LOINC_PARENT_GROUP_FILE = "ParentGroup.csv"; + public static final String CUSTOM_CONCEPTS_FILE = "concepts.csv"; public static final String CUSTOM_HIERARCHY_FILE = 
"hierarchy.csv"; public static final String CUSTOM_CODESYSTEM_JSON = "codesystem.json"; @@ -220,35 +206,61 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { @Override public UploadStatistics loadLoinc(List theFiles, RequestDetails theRequestDetails) { try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) { + List loincUploadPropertiesFragment = Arrays.asList( + LOINC_UPLOAD_PROPERTIES_FILE.getCode() + ); + descriptors.verifyMandatoryFilesExist(loincUploadPropertiesFragment); + + Properties uploadProperties = getProperties(descriptors, LOINC_UPLOAD_PROPERTIES_FILE.getCode()); + List mandatoryFilenameFragments = Arrays.asList( - LOINC_FILE, - LOINC_HIERARCHY_FILE, - LOINC_UPLOAD_PROPERTIES_FILE, - LOINC_ANSWERLIST_FILE, - LOINC_ANSWERLIST_LINK_FILE, - LOINC_PART_FILE, - LOINC_PART_LINK_FILE, - LOINC_PART_RELATED_CODE_MAPPING_FILE, - LOINC_DOCUMENT_ONTOLOGY_FILE, - LOINC_RSNA_PLAYBOOK_FILE, - LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, - LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, - LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, - LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, - LOINC_IMAGING_DOCUMENT_CODES_FILE + uploadProperties.getProperty(LOINC_ANSWERLIST_FILE.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_ANSWERLIST_LINK_FILE.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_DOCUMENT_ONTOLOGY_FILE.getCode(), LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_HIERARCHY_FILE.getCode(), LOINC_HIERARCHY_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE.getCode(), LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_IMAGING_DOCUMENT_CODES_FILE.getCode(), LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode()), + 
uploadProperties.getProperty(LOINC_PART_FILE.getCode(), LOINC_PART_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_PART_LINK_FILE.getCode(), LOINC_PART_LINK_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_PART_RELATED_CODE_MAPPING_FILE.getCode(), LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_RSNA_PLAYBOOK_FILE.getCode(), LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE.getCode(), LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode()) ); descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments); List optionalFilenameFragments = Arrays.asList( + uploadProperties.getProperty(LOINC_GROUP_FILE.getCode(), LOINC_GROUP_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_GROUP_TERMS_FILE.getCode(), LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()) ); descriptors.verifyOptionalFilesExist(optionalFilenameFragments); ourLog.info("Beginning LOINC processing"); - return processLoincFiles(descriptors, theRequestDetails); + return processLoincFiles(descriptors, theRequestDetails, uploadProperties); } } + @NotNull + private Properties getProperties(LoadedFileDescriptors theDescriptors, String thePropertiesFile) { + Properties retVal = new Properties(); + for (FileDescriptor next : theDescriptors.getUncompressedFileDescriptors()) { + if (next.getFilename().endsWith(thePropertiesFile)) { + try { + try (InputStream inputStream = next.getInputStream()) { + retVal.load(inputStream); + } + } catch (IOException e) { + throw new 
InternalErrorException("Failed to read " + thePropertiesFile, e); + } + } + } + return retVal; + } + @Override public UploadStatistics loadSnomedCt(List theFiles, RequestDetails theRequestDetails) { try (LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles)) { @@ -445,7 +457,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { // return new UploadStatistics(conceptCount, target); } - UploadStatistics processLoincFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) { + UploadStatistics processLoincFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails, Properties theUploadProperties) { final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion(); final Map code2concept = new HashMap<>(); final List valueSets = new ArrayList<>(); @@ -470,90 +482,77 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { IRecordHandler handler; - Properties uploadProperties = new Properties(); - for (FileDescriptor next : theDescriptors.getUncompressedFileDescriptors()) { - if (next.getFilename().endsWith(LOINC_UPLOAD_PROPERTIES_FILE)) { - try { - try (InputStream inputStream = next.getInputStream()) { - uploadProperties.load(inputStream); - } - } catch (IOException e) { - throw new InternalErrorException("Failed to read " + LOINC_UPLOAD_PROPERTIES_FILE, e); - } - } - } - - // Part file + // Part handler = new LoincPartHandler(codeSystemVersion, code2concept); - iterateOverZipFile(theDescriptors, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_PART_FILE.getCode(), LOINC_PART_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); Map partTypeAndPartNameToPartNumber = ((LoincPartHandler) handler).getPartTypeAndPartNameToPartNumber(); - // Loinc Codes + // LOINC codes handler = new LoincHandler(codeSystemVersion, code2concept, propertyNamesToTypes, 
partTypeAndPartNameToPartNumber); - iterateOverZipFile(theDescriptors, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Loinc Hierarchy + // LOINC hierarchy handler = new LoincHierarchyHandler(codeSystemVersion, code2concept); - iterateOverZipFile(theDescriptors, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_HIERARCHY_FILE.getCode(), LOINC_HIERARCHY_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Answer lists (ValueSets of potential answers/values for loinc "questions") - handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + // Answer lists (ValueSets of potential answers/values for LOINC "questions") + handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_ANSWERLIST_FILE.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Answer list links (connects loinc observation codes to answerlist codes) + // Answer list links (connects LOINC observation codes to answer list codes) handler = new LoincAnswerListLinkHandler(code2concept, valueSets); - iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_ANSWERLIST_LINK_FILE.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // RSNA Playbook file + // RSNA playbook // Note that this should come before 
the "Part Related Code Mapping" // file because there are some duplicate mappings between these // two files, and the RSNA Playbook file has more metadata - handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_RSNA_PLAYBOOK_FILE.getCode(), LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Part link file + // Part link handler = new LoincPartLinkHandler(codeSystemVersion, code2concept); - iterateOverZipFile(theDescriptors, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_PART_LINK_FILE.getCode(), LOINC_PART_LINK_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); // Part related code mapping - handler = new LoincPartRelatedCodeMappingHandler(code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincPartRelatedCodeMappingHandler(code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_PART_RELATED_CODE_MAPPING_FILE.getCode(), LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Document Ontology File - handler = new LoincDocumentOntologyHandler(code2concept, propertyNamesToTypes, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + // Document ontology + handler = new LoincDocumentOntologyHandler(code2concept, 
propertyNamesToTypes, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_DOCUMENT_ONTOLOGY_FILE.getCode(), LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Top 2000 Codes - US - handler = new LoincTop2000LabResultsUsHandler(code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + // Top 2000 codes - US + handler = new LoincTop2000LabResultsUsHandler(code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Top 2000 Codes - SI - handler = new LoincTop2000LabResultsSiHandler(code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + // Top 2000 codes - SI + handler = new LoincTop2000LabResultsSiHandler(code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Universal Lab Order ValueSet - handler = new LoincUniversalOrderSetHandler(code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + // Universal lab order ValueSet + handler = new LoincUniversalOrderSetHandler(code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, 
theUploadProperties.getProperty(LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE.getCode(), LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // IEEE Medical Device Codes - handler = new LoincIeeeMedicalDeviceCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, handler, ',', QuoteMode.NON_NUMERIC, false); + // IEEE medical device codes + handler = new LoincIeeeMedicalDeviceCodeHandler(code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE.getCode(), LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Imaging Document Codes - handler = new LoincImagingDocumentCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_IMAGING_DOCUMENT_CODES_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + // Imaging document codes + handler = new LoincImagingDocumentCodeHandler(code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_IMAGING_DOCUMENT_CODES_FILE.getCode(), LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Group File - handler = new LoincGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + // Group + handler = new LoincGroupFileHandler(code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_GROUP_FILE.getCode(), LOINC_GROUP_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Group Terms File - handler = new 
LoincGroupTermsFileHandler(code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_GROUP_TERMS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + // Group terms + handler = new LoincGroupTermsFileHandler(code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_GROUP_TERMS_FILE.getCode(), LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - // Parent Group File - handler = new LoincParentGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties); - iterateOverZipFile(theDescriptors, LOINC_PARENT_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false); + // Parent group + handler = new LoincParentGroupFileHandler(code2concept, valueSets, conceptMaps, theUploadProperties); + iterateOverZipFile(theDescriptors, theUploadProperties.getProperty(LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); IOUtils.closeQuietly(theDescriptors); @@ -752,7 +751,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { private void verifyOptionalFilesExist(List theExpectedFilenameFragments) { List notFound = notFound(theExpectedFilenameFragments); if (!notFound.isEmpty()) { - ourLog.warn("Could not find the following optional file: " + notFound); + ourLog.warn("Could not find the following optional files: " + notFound); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java index fcd788c35b6..a2e2b7a43c6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java @@ -34,6 +34,7 @@ import java.util.List; import java.util.Map; import 
java.util.Properties; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*; import static org.apache.commons.lang3.StringUtils.*; public abstract class BaseLoincHandler implements IRecordHandler { @@ -114,7 +115,7 @@ public abstract class BaseLoincHandler implements IRecordHandler { conceptMap.setId(theMapping.getConceptMapId()); conceptMap.setUrl(theMapping.getConceptMapUri()); conceptMap.setName(theMapping.getConceptMapName()); - conceptMap.setVersion(myUploadProperties.getProperty("loinc.conceptmap.version")); + conceptMap.setVersion(myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode())); conceptMap.setPublisher(REGENSTRIEF_INSTITUTE_INC); conceptMap.addContact() .setName(REGENSTRIEF_INSTITUTE_INC) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java index 204609eb332..5e6700da2cd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java @@ -31,7 +31,9 @@ import java.util.List; import java.util.Map; import java.util.Properties; -import static org.apache.commons.lang3.StringUtils.*; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.trim; public class LoincAnswerListHandler extends BaseLoincHandler { @@ -72,7 +74,7 @@ public class LoincAnswerListHandler extends BaseLoincHandler { } // Answer list ValueSet - ValueSet vs = getValueSet(answerListId, "http://loinc.org/vs/" + answerListId, answerListName, "loinc.answerlist.version"); + ValueSet vs = getValueSet(answerListId, "http://loinc.org/vs/" + answerListId, answerListName, LOINC_ANSWERLIST_VERSION.getCode()); if (vs.getIdentifier().isEmpty()) { 
vs.addIdentifier() .setSystem("urn:ietf:rfc:3986") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java new file mode 100644 index 00000000000..aa2656cdea3 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java @@ -0,0 +1,142 @@ +package ca.uhn.fhir.jpa.term.loinc; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * This enum is used to facilitate configurable filenames when uploading LOINC. + */ +public enum LoincUploadPropertiesEnum { + /** + * Sorting agnostic. 
+ */ + + LOINC_UPLOAD_PROPERTIES_FILE("loincupload.properties"), + + /* + * MANDATORY + */ + // Answer lists (ValueSets of potential answers/values for LOINC "questions") + LOINC_ANSWERLIST_FILE("loinc.answerlist.file"), + LOINC_ANSWERLIST_FILE_DEFAULT("AnswerList.csv"), + // Answer list links (connects LOINC observation codes to answer list codes) + LOINC_ANSWERLIST_LINK_FILE("loinc.answerlist.link.file"), + LOINC_ANSWERLIST_LINK_FILE_DEFAULT("LoincAnswerListLink.csv"), + + // Document ontology + LOINC_DOCUMENT_ONTOLOGY_FILE("loinc.document.ontology.file"), + LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT("DocumentOntology.csv"), + + // LOINC codes + LOINC_FILE("loinc.file"), + LOINC_FILE_DEFAULT("LoincTable/Loinc.csv"), + + // LOINC hierarchy + LOINC_HIERARCHY_FILE("loinc.hierarchy.file"), + LOINC_HIERARCHY_FILE_DEFAULT("MultiAxialHierarchy.csv"), + + // IEEE medical device codes + LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE("loinc.ieee.medical.device.code.mapping.table.file"), + LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT("LoincIeeeMedicalDeviceCodeMappingTable.csv"), + + // Imaging document codes + LOINC_IMAGING_DOCUMENT_CODES_FILE("loinc.imaging.document.codes.file"), + LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT("ImagingDocumentCodes.csv"), + + // Part + LOINC_PART_FILE("loinc.part.file"), + LOINC_PART_FILE_DEFAULT("Part.csv"), + // Part link + LOINC_PART_LINK_FILE("loinc.part.link.file"), + LOINC_PART_LINK_FILE_DEFAULT("LoincPartLink.csv"), + // Part related code mapping + LOINC_PART_RELATED_CODE_MAPPING_FILE("loinc.part.related.code.mapping.file"), + LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT("PartRelatedCodeMapping.csv"), + + // RSNA playbook + LOINC_RSNA_PLAYBOOK_FILE("loinc.rsna.playbook.file"), + LOINC_RSNA_PLAYBOOK_FILE_DEFAULT("LoincRsnaRadiologyPlaybook.csv"), + + // Top 2000 codes - SI + LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE("loinc.top2000.common.lab.results.si.file"), + 
LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT("Top2000CommonLabResultsSi.csv"), + // Top 2000 codes - US + LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE("loinc.top2000.common.lab.results.us.file"), + LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT("Top2000CommonLabResultsUs.csv"), + + // Universal lab order ValueSet + LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE("loinc.universal.lab.order.valueset.file"), + LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT("LoincUniversalLabOrdersValueSet.csv"), + + /* + * OPTIONAL + */ + // This is the version identifier for the answer list file + LOINC_ANSWERLIST_VERSION("loinc.answerlist.version"), + + // This is the version identifier for uploaded ConceptMap resources + LOINC_CONCEPTMAP_VERSION("loinc.conceptmap.version"), + + // Group + LOINC_GROUP_FILE("loinc.group.file"), + LOINC_GROUP_FILE_DEFAULT("Group.csv"), + // Group terms + LOINC_GROUP_TERMS_FILE("loinc.group.terms.file"), + LOINC_GROUP_TERMS_FILE_DEFAULT("GroupLoincTerms.csv"), + + // Parent group + LOINC_PARENT_GROUP_FILE("loinc.parent.group.file"), + LOINC_PARENT_GROUP_FILE_DEFAULT("ParentGroup.csv"); + + private static Map ourValues; + private String myCode; + + LoincUploadPropertiesEnum(String theCode) { + myCode = theCode; + } + + public String getCode() { + return myCode; + } + + public static LoincUploadPropertiesEnum fromCode(String theCode) { + if (ourValues == null) { + HashMap values = new HashMap(); + for (LoincUploadPropertiesEnum next : values()) { + values.put(next.getCode(), next); + } + ourValues = Collections.unmodifiableMap(values); + } + return ourValues.get(theCode); + } + + /** + * Convert from Enum ordinal to Enum type. 
+ * + * Usage: + * + * LoincUploadPropertiesEnum loincUploadPropertiesEnum = LoincUploadPropertiesEnum.values[ordinal]; + */ + public static final LoincUploadPropertiesEnum values[] = values(); +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/TerminologyUploaderProviderDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/TerminologyUploaderProviderDstu3Test.java index b792a29aa06..007e9f63b22 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/TerminologyUploaderProviderDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/TerminologyUploaderProviderDstu3Test.java @@ -1,8 +1,6 @@ package ca.uhn.fhir.jpa.provider.dstu3; import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc; -import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl; -import ca.uhn.fhir.jpa.term.ZipCollectionBuilder; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.TestUtil; @@ -20,6 +18,7 @@ import java.util.List; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.greaterThan; import static org.junit.Assert.*; @@ -37,24 +36,24 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs ByteArrayOutputStream bos = new ByteArrayOutputStream(); ZipOutputStream zos = new ZipOutputStream(bos); - addFile(zos, "loincupload.properties"); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE); - addFile(zos, 
TerminologyLoaderSvcImpl.LOINC_GROUP_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_GROUP_TERMS_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_PARENT_GROUP_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_RELATED_CODE_MAPPING_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_DOCUMENT_ONTOLOGY_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_RSNA_PLAYBOOK_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_IMAGING_DOCUMENT_CODES_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE); - addFile(zos, TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE); + addFile(zos, LOINC_UPLOAD_PROPERTIES_FILE.getCode()); + addFile(zos, LOINC_PART_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_HIERARCHY_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_ANSWERLIST_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_GROUP_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_PART_LINK_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()); + addFile(zos, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()); 
zos.close(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java index 60fced8db94..c49dfed2fd9 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java @@ -24,6 +24,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.*; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; @@ -341,8 +342,8 @@ public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest { @Test public void testLoadLoincMissingMandatoryFiles() throws IOException { - myFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UPLOAD_PROPERTIES_FILE); - myFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_FILE); + myFiles.addFileZip("/loinc/", LOINC_UPLOAD_PROPERTIES_FILE.getCode()); + myFiles.addFileZip("/loinc/", LOINC_GROUP_FILE_DEFAULT.getCode()); // Actually do the load try { @@ -356,24 +357,24 @@ public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest { } public static void addLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles) throws IOException { - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UPLOAD_PROPERTIES_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_TERMS_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PARENT_GROUP_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_FILE, TerminologyLoaderSvcImpl.LOINC_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE, 
TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_FILE, TerminologyLoaderSvcImpl.LOINC_PART_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE, TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_RELATED_CODE_MAPPING_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_DOCUMENT_ONTOLOGY_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_RSNA_PLAYBOOK_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_IMAGING_DOCUMENT_CODES_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE); - theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE); + theFiles.addFileZip("/loinc/", LOINC_UPLOAD_PROPERTIES_FILE.getCode()); + theFiles.addFileZip("/loinc/", LOINC_GROUP_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_FILE_DEFAULT.getCode(), LOINC_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_HIERARCHY_FILE_DEFAULT.getCode(), LOINC_HIERARCHY_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_ANSWERLIST_FILE_DEFAULT.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", 
LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_PART_FILE_DEFAULT.getCode(), LOINC_PART_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_PART_LINK_FILE_DEFAULT.getCode(), LOINC_PART_LINK_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()); + theFiles.addFileZip("/loinc/", LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()); } @Test diff --git a/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties b/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties index 3ee26623b47..8ecefb96c4e 100644 --- a/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties +++ b/hapi-fhir-jpaserver-base/src/test/resources/loinc/loincupload.properties @@ -1,33 +1,95 @@ +################# +### MANDATORY ### +################# + +# Answer lists (ValueSets of potential answers/values for LOINC "questions") +## Default value if key not provided: AnswerList.csv +## File must be present loinc.answerlist.file=AnswerList.csv +# Answer list links (connects LOINC observation codes to answer list codes) +## Default value if key not provided: LoincAnswerListLink.csv +## File must be present loinc.answerlist.link.file=LoincAnswerListLink.csv -# This is the version identifier for the AnswerList file + +# Document ontology +## Default value if key not 
provided: DocumentOntology.csv +## File must be present +loinc.document.ontology.file=DocumentOntology.csv + +# LOINC codes +## Default value if key not provided: LoincTable/Loinc.csv +## File must be present +loinc.file=LoincTable/Loinc.csv + +# LOINC hierarchy +## Default value if key not provided: MultiAxialHierarchy.csv +## File must be present +loinc.hierarchy.file=MultiAxialHierarchy.csv + +# IEEE medical device codes +## Default value if key not provided: LoincIeeeMedicalDeviceCodeMappingTable.csv +## File must be present +loinc.ieee.medical.device.code.mapping.table.file=LoincIeeeMedicalDeviceCodeMappingTable.csv + +# Imaging document codes +## Default value if key not provided: ImagingDocumentCodes.csv +## File must be present +loinc.imaging.document.codes.file=ImagingDocumentCodes.csv + +# Part +## Default value if key not provided: Part.csv +## File must be present +loinc.part.file=Part.csv +# Part link +## Default value if key not provided: LoincPartLink.csv +## File must be present +loinc.part.link.file=LoincPartLink.csv +# Part related code mapping +## Default value if key not provided: PartRelatedCodeMapping.csv +## File must be present +loinc.part.related.code.mapping.file=PartRelatedCodeMapping.csv + +# RSNA playbook +## Default value if key not provided: LoincRsnaRadiologyPlaybook.csv +## File must be present +loinc.rsna.playbook.file=LoincRsnaRadiologyPlaybook.csv + +# Top 2000 codes - SI +## Default value if key not provided: Top2000CommonLabResultsSi.csv +## File must be present +loinc.top2000.common.lab.results.si.file=Top2000CommonLabResultsSi.csv +# Top 2000 codes - US +## Default value if key not provided: Top2000CommonLabResultsUs.csv +## File must be present +loinc.top2000.common.lab.results.us.file=Top2000CommonLabResultsUs.csv + +# Universal lab order ValueSet +## Default value if key not provided: LoincUniversalLabOrdersValueSet.csv +## File must be present +loinc.universal.lab.order.valueset.file=LoincUniversalLabOrdersValueSet.csv + 
+################ +### OPTIONAL ### +################ + +# This is the version identifier for the answer list file +## Key may be omitted loinc.answerlist.version=Beta.1 # This is the version identifier for uploaded ConceptMap resources +## Key may be omitted loinc.conceptmap.version=Beta.1 -loinc.document.ontology.file=DocumentOntology.csv - -loinc.file=LoincTable/Loinc.csv - +# Group +## Default value if key not provided: Group.csv +## File may be omitted loinc.group.file=Group.csv +# Group terms +## Default value if key not provided: GroupLoincTerms.csv +## File may be omitted loinc.group.terms.file=GroupLoincTerms.csv -loinc.hierarchy.file=MultiAxialHierarchy.csv - -loinc.ieee.medical.device.code.mapping.table.file=LoincIeeeMedicalDeviceCodeMappingTable.csv - -loinc.imaging.document.codes.file=ImagingDocumentCodes.csv - +# Parent group +## Default value if key not provided: ParentGroup.csv +## File may be omitted loinc.parent.group.file=ParentGroup.csv - -loinc.part.file=Part.csv -loinc.part.link.file=LoincPartLink.csv -loinc.part.related.code.mapping.file=PartRelatedCodeMapping.csv - -loinc.rsna.playbook.file=LoincRsnaRadiologyPlaybook.csv - -loinc.top2000.common.lab.results.si.file=Top2000CommonLabResultsSi.csv -loinc.top2000.common.lab.results.us.file=Top2000CommonLabResultsUs.csv - -loinc.universal.lab.order.valueset.file=LoincUniversalLabOrdersValueSet.csv From 94c7fc49fd156ae179315ec8a38eb092d2b0df0e Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk Date: Tue, 27 Aug 2019 13:46:42 -0400 Subject: [PATCH 22/23] Making LOINC filenames configurable; fix implicit ValueSet URL for loinc-all. 
--- .../java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java | 2 +- .../ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java index a36e326381b..f719c98b37d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcImpl.java @@ -579,7 +579,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc { ValueSet retVal = new ValueSet(); retVal.setId("loinc-all"); - retVal.setUrl("http://loinc.org/fhir/ValueSet/loinc-all"); + retVal.setUrl("http://loinc.org/vs"); retVal.setVersion("1.0.0"); retVal.setName("All LOINC codes"); retVal.setStatus(Enumerations.PublicationStatus.ACTIVE); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java index c49dfed2fd9..a6ca90547bc 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincTest.java @@ -259,7 +259,7 @@ public class TerminologyLoaderSvcLoincTest extends BaseLoaderTest { // All LOINC codes assertTrue(valueSets.containsKey("loinc-all")); vs = valueSets.get("loinc-all"); - assertEquals("http://loinc.org/fhir/ValueSet/loinc-all", vs.getUrl()); + assertEquals("http://loinc.org/vs", vs.getUrl()); assertEquals("1.0.0", vs.getVersion()); assertEquals("All LOINC codes", vs.getName()); assertEquals(Enumerations.PublicationStatus.ACTIVE, vs.getStatus()); From 9ef0519b4c0e02ab162da9278179d84f77453c23 Mon Sep 17 00:00:00 2001 From: Diederik Muylwyk 
Date: Tue, 27 Aug 2019 21:15:12 -0400 Subject: [PATCH 23/23] validate-code operation has been optimized for large ValueSets; submitting pull request. --- .../jpa/dao/IFhirResourceDaoValueSet.java | 2 +- .../jpa/dao/data/ITermValueSetConceptDao.java | 6 ++ .../dstu3/FhirResourceDaoValueSetDstu3.java | 11 +++- .../jpa/dao/r4/FhirResourceDaoValueSetR4.java | 25 +++++---- .../jpa/dao/r5/FhirResourceDaoValueSetR5.java | 11 +++- .../jpa/term/BaseHapiTerminologySvcImpl.java | 43 ++++++++++++++ .../jpa/term/HapiTerminologySvcDstu2.java | 12 ++-- .../jpa/term/HapiTerminologySvcDstu3.java | 26 ++++++++- .../fhir/jpa/term/HapiTerminologySvcR4.java | 14 +++-- .../fhir/jpa/term/HapiTerminologySvcR5.java | 19 ++++++- .../fhir/jpa/term/IHapiTerminologySvc.java | 15 ++--- .../jpa/term/TerminologySvcImplR4Test.java | 56 ++++++++++++++++++- 12 files changed, 203 insertions(+), 37 deletions(-) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java index 80aff0ed58c..3131726b20f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDaoValueSet.java @@ -41,7 +41,7 @@ public interface IFhirResourceDaoValueSet exten ValidateCodeResult validateCode(IPrimitiveType theValueSetIdentifier, IIdType theId, IPrimitiveType theCode, IPrimitiveType theSystem, IPrimitiveType theDisplay, CD theCoding, CC theCodeableConcept, RequestDetails theRequestDetails); - public class ValidateCodeResult { + class ValidateCodeResult { private String myDisplay; private String myMessage; private boolean myResult; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java index e225719ed4e..dca653b2058 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDao.java @@ -28,6 +28,7 @@ import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; +import java.util.List; import java.util.Optional; public interface ITermValueSetConceptDao extends JpaRepository { @@ -45,4 +46,9 @@ public interface ITermValueSetConceptDao extends JpaRepository findByTermValueSetIdSystemAndCode(@Param("pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode); + @Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myResourcePid = :resource_pid AND vsc.myCode = :codeval") + List findOneByValueSetIdAndCode(@Param("resource_pid") Long theValueSetId, @Param("codeval") String theCode); + + @Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myResourcePid = :resource_pid AND vsc.mySystem = :system_url AND vsc.myCode = :codeval") + List findOneByValueSetIdSystemAndCode(@Param("resource_pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java index 7c8895b06dc..3e5d9f89445 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java @@ -330,9 +330,14 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3 } if (vs != null) { - ValueSet expansion = doExpand(vs); // TODO: DM 2019-08-17 - Need to account for concepts in terminology tables. 
See #1431 - List contains = expansion.getExpansion().getContains(); - ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); + ValidateCodeResult result; + if (myDaoConfig.isPreExpandValueSetsExperimental()) { + result = myTerminologySvc.validateCodeIsInPreExpandedValueSet(vs, toStringOrNull(theSystem), toStringOrNull(theCode), toStringOrNull(theDisplay), theCoding, theCodeableConcept); + } else { + ValueSet expansion = doExpand(vs); + List contains = expansion.getExpansion().getContains(); + result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); + } if (result != null) { if (theDisplay != null && isNotBlank(theDisplay.getValue()) && isNotBlank(result.getDisplay())) { if (!theDisplay.getValue().equals(result.getDisplay())) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java index 6a525e07c0b..aa719557794 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoValueSetR4.java @@ -286,15 +286,15 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple } @Override - public ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCode(IPrimitiveType theValueSetIdentifier, IIdType theId, IPrimitiveType theCode, + public ValidateCodeResult validateCode(IPrimitiveType theValueSetIdentifier, IIdType theId, IPrimitiveType theCode, IPrimitiveType theSystem, IPrimitiveType theDisplay, Coding theCoding, CodeableConcept theCodeableConcept, RequestDetails theRequestDetails) { List valueSetIds = Collections.emptyList(); boolean haveCodeableConcept = theCodeableConcept != null && 
theCodeableConcept.getCoding().size() > 0; - boolean haveCoding = theCoding != null && theCoding.isEmpty() == false; - boolean haveCode = theCode != null && theCode.isEmpty() == false; + boolean haveCoding = theCoding != null && !theCoding.isEmpty(); + boolean haveCode = theCode != null && !theCode.isEmpty(); if (!haveCodeableConcept && !haveCoding && !haveCode) { throw new InvalidRequestException("No code, coding, or codeableConcept provided to validate"); @@ -303,7 +303,7 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple throw new InvalidRequestException("$validate-code can only validate (system AND code) OR (coding) OR (codeableConcept)"); } - boolean haveIdentifierParam = theValueSetIdentifier != null && theValueSetIdentifier.isEmpty() == false; + boolean haveIdentifierParam = theValueSetIdentifier != null && !theValueSetIdentifier.isEmpty(); ValueSet vs = null; if (theId != null) { vs = read(theId, theRequestDetails); @@ -320,15 +320,20 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple // String system = toStringOrNull(theSystem); IContextValidationSupport.LookupCodeResult result = myCodeSystemDao.lookupCode(theCode, theSystem, null, null); if (result.isFound()) { - ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult retVal = new ValidateCodeResult(true, "Found code", result.getCodeDisplay()); + ValidateCodeResult retVal = new ValidateCodeResult(true, "Found code", result.getCodeDisplay()); return retVal; } } if (vs != null) { - ValueSet expansion = doExpand(vs); // TODO: DM 2019-08-17 - Need to account for concepts in terminology tables. 
See #1431 - List contains = expansion.getExpansion().getContains(); - ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); + ValidateCodeResult result; + if (myDaoConfig.isPreExpandValueSetsExperimental()) { + result = myTerminologySvc.validateCodeIsInPreExpandedValueSet(vs, toStringOrNull(theSystem), toStringOrNull(theCode), toStringOrNull(theDisplay), theCoding, theCodeableConcept); + } else { + ValueSet expansion = doExpand(vs); + List contains = expansion.getExpansion().getContains(); + result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); + } if (result != null) { if (theDisplay != null && isNotBlank(theDisplay.getValue()) && isNotBlank(result.getDisplay())) { if (!theDisplay.getValue().equals(result.getDisplay())) { @@ -347,10 +352,10 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4 imple return thePrimitive != null ? 
thePrimitive.getValue() : null; } - private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List contains, String theSystem, String theCode, + private ValidateCodeResult validateCodeIsInContains(List contains, String theSystem, String theCode, Coding theCoding, CodeableConcept theCodeableConcept) { for (ValueSetExpansionContainsComponent nextCode : contains) { - ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept); + ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept); if (result != null) { return result; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java index 855c8d43bcf..f0bb1ed9b12 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoValueSetR5.java @@ -332,9 +332,14 @@ public class FhirResourceDaoValueSetR5 extends FhirResourceDaoR5 imple } if (vs != null) { - ValueSet expansion = doExpand(vs); // TODO: DM 2019-08-17 - Need to account for concepts in terminology tables. 
See #1431 - List contains = expansion.getExpansion().getContains(); - ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); + ValidateCodeResult result; + if (myDaoConfig.isPreExpandValueSetsExperimental()) { + result = myTerminologySvc.validateCodeIsInPreExpandedValueSet(vs, toStringOrNull(theSystem), toStringOrNull(theCode), toStringOrNull(theDisplay), theCoding, theCodeableConcept); + } else { + ValueSet expansion = doExpand(vs); + List contains = expansion.getExpansion().getContains(); + result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept); + } if (result != null) { if (theDisplay != null && isNotBlank(theDisplay.getValue()) && isNotBlank(result.getDisplay())) { if (!theDisplay.getValue().equals(result.getDisplay())) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java index 9ce1362b4fd..2e3324b8719 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java @@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.IContextValidationSupport; import ca.uhn.fhir.jpa.dao.*; +import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult; import ca.uhn.fhir.jpa.dao.data.*; import ca.uhn.fhir.jpa.entity.*; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum; @@ -959,6 +960,48 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } } + protected ValidateCodeResult validateCodeIsInPreExpandedValueSet( + ValueSet theValueSet, String theSystem, String theCode, String theDisplay, Coding 
theCoding, CodeableConcept theCodeableConcept) { + + ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet.hasId(), "ValueSet.id is required"); + + Long valueSetId = theValueSet.getIdElement().toUnqualifiedVersionless().getIdPartAsLong(); + + List concepts = new ArrayList<>(); + if (isNotBlank(theCode)) { + if (isNotBlank(theSystem)) { + concepts = myValueSetConceptDao.findOneByValueSetIdSystemAndCode(valueSetId, theSystem, theCode); + } else { + concepts = myValueSetConceptDao.findOneByValueSetIdAndCode(valueSetId, theCode); + } + } else if (theCoding != null) { + if (theCoding.hasSystem() && theCoding.hasCode()) { + concepts = myValueSetConceptDao.findOneByValueSetIdSystemAndCode(valueSetId, theCoding.getSystem(), theCoding.getCode()); + } + } else if (theCodeableConcept != null){ + for (Coding coding : theCodeableConcept.getCoding()) { + if (coding.hasSystem() && coding.hasCode()) { + concepts = myValueSetConceptDao.findOneByValueSetIdSystemAndCode(valueSetId, coding.getSystem(), coding.getCode()); + if (!concepts.isEmpty()) { + break; + } + } + } + } + + for (TermValueSetConcept concept : concepts) { + if (isNotBlank(theDisplay) && theDisplay.equals(concept.getDisplay())) { + return new ValidateCodeResult(true, "Validation succeeded", concept.getDisplay()); + } + } + + if (!concepts.isEmpty()) { + return new ValidateCodeResult(true, "Validation succeeded", concepts.get(0).getDisplay()); + } + + return null; + } + private void fetchChildren(TermConcept theConcept, Set theSetToPopulate) { for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) { TermConcept nextChild = nextChildLink.getChild(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java index a64bb9baccf..eb9b7c7e96e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu2.java @@ -20,13 +20,13 @@ package ca.uhn.fhir.jpa.term; * #L% */ +import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult; import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import org.hl7.fhir.instance.hapi.validation.IValidationSupport; +import org.hl7.fhir.instance.model.api.IBaseDatatype; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.instance.hapi.validation.IValidationSupport; -import org.hl7.fhir.r4.model.CodeSystem; -import org.hl7.fhir.r4.model.ConceptMap; -import org.hl7.fhir.r4.model.ValueSet; +import org.hl7.fhir.r4.model.*; import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; @@ -149,4 +149,8 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl { return retVal; } + @Override + public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) { + throw new UnsupportedOperationException(); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java index 4866ddd45e0..09f17ea98bf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcDstu3.java @@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem; +import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import 
ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -15,6 +16,7 @@ import org.hl7.fhir.dstu3.model.CodeSystem.ConceptDefinitionComponent; import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent; import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionComponent; import org.hl7.fhir.exceptions.FHIRException; +import org.hl7.fhir.instance.model.api.IBaseDatatype; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity; @@ -29,7 +31,8 @@ import java.util.Collections; import java.util.List; import java.util.Optional; -import static org.apache.commons.lang3.StringUtils.*; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; /* * #%L @@ -359,5 +362,26 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implemen return null; } + @Override + public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) { + ValueSet valueSet = (ValueSet) theValueSet; + Coding coding = (Coding) theCoding; + CodeableConcept codeableConcept = (CodeableConcept) theCodeableConcept; + try { + org.hl7.fhir.r4.model.ValueSet valueSetR4; + valueSetR4 = VersionConvertor_30_40.convertValueSet(valueSet); + + org.hl7.fhir.r4.model.Coding codingR4 = new org.hl7.fhir.r4.model.Coding(coding.getSystem(), coding.getCode(), coding.getDisplay()); + + org.hl7.fhir.r4.model.CodeableConcept codeableConceptR4 = new org.hl7.fhir.r4.model.CodeableConcept(); + for (Coding nestedCoding : codeableConcept.getCoding()) { + codeableConceptR4.addCoding(new org.hl7.fhir.r4.model.Coding(nestedCoding.getSystem(), nestedCoding.getCode(), nestedCoding.getDisplay())); + } + + return super.validateCodeIsInPreExpandedValueSet(valueSetR4, theSystem, theCode, theDisplay, 
codingR4, codeableConceptR4); + } catch (FHIRException e) { + throw new InternalErrorException(e); + } + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java index 6ded7d0a469..a27afaccd49 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR4.java @@ -2,18 +2,17 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.util.CoverageIgnore; import ca.uhn.fhir.util.UrlUtil; +import org.hl7.fhir.instance.model.api.IBaseDatatype; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.hapi.ctx.IValidationSupport; -import org.hl7.fhir.r4.model.CodeSystem; +import org.hl7.fhir.r4.model.*; import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent; -import org.hl7.fhir.r4.model.ConceptMap; -import org.hl7.fhir.r4.model.StructureDefinition; -import org.hl7.fhir.r4.model.ValueSet; import org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent; import org.hl7.fhir.r4.terminologies.ValueSetExpander; import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity; @@ -278,4 +277,11 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements return super.lookupCode(theContext, theSystem, theCode); } + @Override + public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) { + ValueSet valueSet = (ValueSet) theValueSet; + Coding 
coding = (Coding) theCoding; + CodeableConcept codeableConcept = (CodeableConcept) theCodeableConcept; + return super.validateCodeIsInPreExpandedValueSet(valueSet, theSystem, theCode, theDisplay, coding, codeableConcept); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java index c90099016f0..b828627e94e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/HapiTerminologySvcR5.java @@ -2,14 +2,16 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.util.CoverageIgnore; import ca.uhn.fhir.util.UrlUtil; +import org.hl7.fhir.instance.model.api.IBaseDatatype; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.r5.hapi.ctx.IValidationSupport; import org.hl7.fhir.r5.model.*; +import org.hl7.fhir.r5.hapi.ctx.IValidationSupport; import org.hl7.fhir.r5.model.CodeSystem.ConceptDefinitionComponent; import org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent; import org.hl7.fhir.r5.terminologies.ValueSetExpander; @@ -287,4 +289,19 @@ public class HapiTerminologySvcR5 extends BaseHapiTerminologySvcImpl implements return super.lookupCode(theContext, theSystem, theCode); } + @Override + public ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) { + org.hl7.fhir.r4.model.ValueSet valueSetR4 = org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet((ValueSet) theValueSet); + + 
Coding coding = (Coding) theCoding; + org.hl7.fhir.r4.model.Coding codingR4 = new org.hl7.fhir.r4.model.Coding(coding.getSystem(), coding.getCode(), coding.getDisplay()); + + CodeableConcept codeableConcept = (CodeableConcept) theCodeableConcept; + org.hl7.fhir.r4.model.CodeableConcept codeableConceptR4 = new org.hl7.fhir.r4.model.CodeableConcept(); + for (Coding nestedCoding : codeableConcept.getCoding()) { + codeableConceptR4.addCoding(new org.hl7.fhir.r4.model.Coding(nestedCoding.getSystem(), nestedCoding.getCode(), nestedCoding.getDisplay())); + } + + return super.validateCodeIsInPreExpandedValueSet(valueSetR4, theSystem, theCode, theDisplay, codingR4, codeableConceptR4); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java index 195a4192c3a..8675d594205 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IHapiTerminologySvc.java @@ -1,16 +1,12 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem; +import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult; import ca.uhn.fhir.jpa.entity.*; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.hl7.fhir.instance.model.api.IBaseCoding; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.instance.model.api.IPrimitiveType; -import org.hl7.fhir.r4.model.CodeSystem; -import org.hl7.fhir.r4.model.ConceptMap; -import org.hl7.fhir.r4.model.ValueSet; +import org.hl7.fhir.instance.model.api.*; +import org.hl7.fhir.r4.model.*; import javax.annotation.Nullable; import java.util.List; @@ -116,4 +112,9 @@ public interface IHapiTerminologySvc { AtomicInteger applyDeltaCodesystemsRemove(String theSystem, 
CodeSystem theDelta); void preExpandValueSetToTerminologyTables(); + + /** + * Version independent + */ + ValidateCodeResult validateCodeIsInPreExpandedValueSet(IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index f87145ce9f1..a5998ad8b4a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.context.support.IContextValidationSupport; import ca.uhn.fhir.jpa.dao.DaoConfig; +import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult; import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; import ca.uhn.fhir.jpa.entity.*; import ca.uhn.fhir.jpa.model.entity.ResourceTable; @@ -13,7 +14,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.TestUtil; import com.google.common.collect.Lists; import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.r4.hapi.ctx.IValidationSupport; +import org.hl7.fhir.r4.hapi.ctx.IValidationSupport.CodeValidationResult; import org.hl7.fhir.r4.model.*; import org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence; import org.hl7.fhir.r4.model.codesystems.ConceptSubsumptionOutcome; @@ -29,7 +30,6 @@ import org.springframework.transaction.support.TransactionTemplate; import javax.annotation.Nonnull; import java.io.IOException; -import java.util.Date; import java.util.List; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; @@ -2607,13 +2607,63 @@ public class TerminologySvcImplR4Test extends BaseJpaR4Test { public void testValidateCode() { createCodeSystem(); - 
IValidationSupport.CodeValidationResult validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ParentWithNoChildrenA", null); + CodeValidationResult validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ParentWithNoChildrenA", null); assertEquals(true, validation.isOk()); validation = myTermSvc.validateCode(myFhirCtx, CS_URL, "ZZZZZZZ", null); assertEquals(false, validation.isOk()); } + @Test + public void testValidateCodeIsInPreExpandedValueSet() throws Exception { + myDaoConfig.setPreExpandValueSetsExperimental(true); + + loadAndPersistCodeSystemAndValueSetWithDesignations(); + + CodeSystem codeSystem = myCodeSystemDao.read(myExtensionalCsId); + ourLog.info("CodeSystem:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(codeSystem)); + + ValueSet valueSet = myValueSetDao.read(myExtensionalVsId); + ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); + + myTermSvc.preExpandValueSetToTerminologyTables(); + + ValidateCodeResult result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, null, null, null, null); + assertNull(result); + + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, "BOGUS", null, null, null); + assertNull(result); + + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, "11378-7", null, null, null); + assertTrue(result.isResult()); + assertEquals("Validation succeeded", result.getMessage()); + assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); + + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, "11378-7", "Systolic blood pressure at First encounter", null, null); + assertTrue(result.isResult()); + assertEquals("Validation succeeded", result.getMessage()); + assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); + + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, "http://acme.org", "11378-7", null, null, null); + 
assertTrue(result.isResult()); + assertEquals("Validation succeeded", result.getMessage()); + assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); + + Coding coding = new Coding("http://acme.org", "11378-7", "Systolic blood pressure at First encounter"); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, null, null, coding, null); + assertTrue(result.isResult()); + assertEquals("Validation succeeded", result.getMessage()); + assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); + + CodeableConcept codeableConcept = new CodeableConcept(); + codeableConcept.addCoding(new Coding("BOGUS", "BOGUS", "BOGUS")); + codeableConcept.addCoding(coding); + result = myTermSvc.validateCodeIsInPreExpandedValueSet(valueSet, null, null, null, null, codeableConcept); + assertTrue(result.isResult()); + assertEquals("Validation succeeded", result.getMessage()); + assertEquals("Systolic blood pressure at First encounter", result.getDisplay()); + } + @AfterClass public static void afterClassClearContext() { TestUtil.clearAllStaticFieldsForUnitTest();