Work on removing validation cache

This commit is contained in:
James Agnew 2018-08-28 05:26:06 -04:00
parent d0761ea604
commit fee0166800
16 changed files with 334 additions and 86 deletions

View File

@@ -73,8 +73,10 @@ public class FhirResourceDaoDstu3<T extends IAnyResource> extends BaseHapiFhirRe
 	@Override
 	public MethodOutcome validate(T theResource, IIdType theId, String theRawResource, EncodingEnum theEncoding, ValidationModeEnum theMode, String theProfile, RequestDetails theRequestDetails) {
-		ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails, theResource, null, theId);
-		notifyInterceptors(RestOperationTypeEnum.VALIDATE, requestDetails);
+		if (theRequestDetails != null) {
+			ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails, theResource, null, theId);
+			notifyInterceptors(RestOperationTypeEnum.VALIDATE, requestDetails);
+		}
 		if (theMode == ValidationModeEnum.DELETE) {
 			if (theId == null || theId.hasIdPart() == false) {

View File

@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao.dstu3;
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -20,19 +20,22 @@ package ca.uhn.fhir.jpa.dao.dstu3;
  * #L%
  */
-import static org.apache.commons.lang3.StringUtils.isBlank;
-import static org.apache.commons.lang3.StringUtils.isNotBlank;
-import java.util.Collections;
-import java.util.List;
-import java.util.stream.Collectors;
+import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
+import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem.LookupCodeResult;
+import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
+import ca.uhn.fhir.jpa.util.LogicUtil;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.util.ElementUtil;
 import org.apache.commons.codec.binary.StringUtils;
 import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
 import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
 import org.hl7.fhir.dstu3.model.*;
 import org.hl7.fhir.dstu3.model.Enumerations.PublicationStatus;
-import org.hl7.fhir.dstu3.model.ValueSet.*;
+import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent;
+import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetFilterComponent;
+import org.hl7.fhir.dstu3.model.ValueSet.FilterOperator;
+import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionContainsComponent;
 import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ValueSetExpansionOutcome;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
@@ -41,20 +44,18 @@ import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
-import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
-import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem.LookupCodeResult;
-import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
-import ca.uhn.fhir.jpa.util.LogicUtil;
-import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import ca.uhn.fhir.util.ElementUtil;
+import java.util.Collections;
+import java.util.List;
+
+import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;

 public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet> implements IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> {
+	private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoValueSetDstu3.class);

 	@Autowired
 	@Qualifier("myJpaValidationSupportChainDstu3")
 	private IValidationSupport myValidationSupport;

 	@Autowired
 	private IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> myCodeSystemDao;
@@ -69,21 +70,32 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
 		validateIncludes("include", theSource.getCompose().getInclude());
 		validateIncludes("exclude", theSource.getCompose().getExclude());

+		/*
+		 * If all of the code systems are supported by the HAPI FHIR terminology service, let's
+		 * use that as it's more efficient.
+		 */
+		boolean allSystemsAreSuppportedByTerminologyService = true;
+		for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
+			if (!myTerminologySvc.supportsSystem(next.getSystem())) {
+				allSystemsAreSuppportedByTerminologyService = false;
+			}
+		}
+		for (ConceptSetComponent next : theSource.getCompose().getExclude()) {
+			if (!myTerminologySvc.supportsSystem(next.getSystem())) {
+				allSystemsAreSuppportedByTerminologyService = false;
+			}
+		}
+		if (allSystemsAreSuppportedByTerminologyService) {
+			return (ValueSet) myTerminologySvc.expandValueSet(theSource);
+		}
+
 		HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport);
 		ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null);

 		ValueSet retVal = outcome.getValueset();
 		retVal.setStatus(PublicationStatus.ACTIVE);
 		return retVal;
-
-		// ValueSetExpansionComponent expansion = outcome.getValueset().getExpansion();
-		//
-		// ValueSet retVal = new ValueSet();
-		// retVal.getMeta().setLastUpdated(new Date());
-		// retVal.setExpansion(expansion);
-		// return retVal;
 	}

 	private void validateIncludes(String name, List<ConceptSetComponent> listToValidate) {
@@ -185,8 +197,8 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
 	@Override
 	public ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCode(IPrimitiveType<String> theValueSetIdentifier, IIdType theId, IPrimitiveType<String> theCode,
 			IPrimitiveType<String> theSystem, IPrimitiveType<String> theDisplay, Coding theCoding,
 			CodeableConcept theCodeableConcept, RequestDetails theRequestDetails) {
 		List<IIdType> valueSetIds = Collections.emptyList();
@@ -242,15 +254,12 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
 	}

-	private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoValueSetDstu3.class);
-
 	private String toStringOrNull(IPrimitiveType<String> thePrimitive) {
 		return thePrimitive != null ? thePrimitive.getValue() : null;
 	}

 	private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List<ValueSetExpansionContainsComponent> contains, String theSystem, String theCode,
 			Coding theCoding, CodeableConcept theCodeableConcept) {
 		for (ValueSetExpansionContainsComponent nextCode : contains) {
 			ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept);
 			if (result != null) {
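For reference, the new branch in doExpand prefers the JPA terminology service whenever every include and exclude system is backed by it, and only falls back to the in-memory HapiWorkerContext expander otherwise. A minimal sketch of that pre-check, not part of this commit, assuming it sits inside FhirResourceDaoValueSetDstu3 where ConceptSetComponent and myTerminologySvc are already in scope; the helper name is hypothetical:

// Sketch only: extracted form of the supportsSystem() pre-check shown above.
private boolean isExpandableByTerminologyService(ValueSet theSource) {
   // Every include and exclude system must be known to the terminology service,
   // otherwise the DAO falls back to HapiWorkerContext-based expansion.
   for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
      if (!myTerminologySvc.supportsSystem(next.getSystem())) {
         return false;
      }
   }
   for (ConceptSetComponent next : theSource.getCompose().getExclude()) {
      if (!myTerminologySvc.supportsSystem(next.getSystem())) {
         return false;
      }
   }
   return true;
}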

View File

@@ -63,8 +63,25 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
 	private ValueSet doExpand(ValueSet theSource) {
-		validateIncludes("include", theSource.getCompose().getInclude());
-		validateIncludes("exclude", theSource.getCompose().getExclude());
+		/*
+		 * If all of the code systems are supported by the HAPI FHIR terminology service, let's
+		 * use that as it's more efficient.
+		 */
+		boolean allSystemsAreSuppportedByTerminologyService = true;
+		for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
+			if (!myTerminologySvc.supportsSystem(next.getSystem())) {
+				allSystemsAreSuppportedByTerminologyService = false;
+			}
+		}
+		for (ConceptSetComponent next : theSource.getCompose().getExclude()) {
+			if (!myTerminologySvc.supportsSystem(next.getSystem())) {
+				allSystemsAreSuppportedByTerminologyService = false;
+			}
+		}
+		if (allSystemsAreSuppportedByTerminologyService) {
+			return myTerminologySvc.expandValueSet(theSource);
+		}
+
 		HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport);

View File

@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.term;
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -52,8 +52,6 @@ import org.hibernate.search.jpa.FullTextEntityManager;
 import org.hibernate.search.jpa.FullTextQuery;
 import org.hibernate.search.query.dsl.BooleanJunction;
 import org.hibernate.search.query.dsl.QueryBuilder;
-import org.hibernate.search.query.dsl.TermMatchingContext;
-import org.hibernate.search.query.dsl.TermTermination;
 import org.hl7.fhir.exceptions.FHIRException;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.CodeSystem;

@@ -84,6 +82,7 @@ import javax.persistence.TypedQuery;
 import javax.persistence.criteria.*;
 import java.util.*;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
@@ -141,8 +140,9 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 	/**
 	 * @param theAdd If true, add the code. If false, remove the code.
+	 * @param theCodeCounter
 	 */
-	private void addCodeIfNotAlreadyAdded(String theCodeSystem, ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, TermConcept theConcept, boolean theAdd) {
+	private void addCodeIfNotAlreadyAdded(String theCodeSystem, ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, TermConcept theConcept, boolean theAdd, AtomicInteger theCodeCounter) {
 		String code = theConcept.getCode();
 		if (theAdd && theAddedCodes.add(code)) {
 			ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains();

@@ -158,10 +158,13 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 					.setCode(nextDesignation.getUseCode())
 					.setDisplay(nextDesignation.getUseDisplay());
 			}
+
+			theCodeCounter.incrementAndGet();
 		}

 		if (!theAdd && theAddedCodes.remove(code)) {
 			removeCodeFromExpansion(theCodeSystem, code, theExpansionComponent);
+			theCodeCounter.decrementAndGet();
 		}
 	}
@@ -412,22 +415,32 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 	@Override
 	@Transactional(propagation = Propagation.REQUIRED)
 	public ValueSet expandValueSet(ValueSet theValueSetToExpand) {
 		ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent();
+		expansionComponent.setIdentifier(UUID.randomUUID().toString());
+		expansionComponent.setTimestamp(new Date());
+
 		Set<String> addedCodes = new HashSet<>();
+		AtomicInteger codeCounter = new AtomicInteger(0);

 		// Handle includes
+		ourLog.debug("Handling includes");
 		for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) {
 			boolean add = true;
-			expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add);
+			expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add, codeCounter);
 		}

 		// Handle excludes
+		ourLog.debug("Handling excludes");
 		for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getExclude()) {
 			boolean add = false;
-			expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add);
+			expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add, codeCounter);
 		}

+		expansionComponent.setTotal(codeCounter.get());
+
 		ValueSet valueSet = new ValueSet();
+		valueSet.setCompose(theValueSetToExpand.getCompose());
 		valueSet.setExpansion(expansionComponent);
 		return valueSet;
 	}
@@ -445,10 +458,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		return retVal;
 	}

-	public void expandValueSetHandleIncludeOrExclude(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, ValueSet.ConceptSetComponent include, boolean theAdd) {
-		String system = include.getSystem();
+	public void expandValueSetHandleIncludeOrExclude(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, ValueSet.ConceptSetComponent theInclude, boolean theAdd, AtomicInteger theCodeCounter) {
+		String system = theInclude.getSystem();
+		ourLog.info("Starting {} expansion around code system: {}", (theAdd ? "inclusion" : "exclusion"), system);
 		if (isNotBlank(system)) {
-			ourLog.info("Starting expansion around code system: {}", system);
 			TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
 			if (cs != null) {

@@ -463,9 +476,9 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
				 * Filters
				 */

-				if (include.getFilter().size() > 0) {
-					for (ValueSet.ConceptSetFilterComponent nextFilter : include.getFilter()) {
+				if (theInclude.getFilter().size() > 0) {
+					for (ValueSet.ConceptSetFilterComponent nextFilter : theInclude.getFilter()) {
 						if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
 							continue;
 						}
@@ -542,13 +555,13 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
				 * Include Concepts
				 */

-				List<Term> codes = include
+				List<Term> codes = theInclude
 					.getConcept()
 					.stream()
 					.filter(Objects::nonNull)
 					.map(ValueSet.ConceptReferenceComponent::getCode)
 					.filter(StringUtils::isNotBlank)
-					.map(t->new Term("myCode", t))
+					.map(t -> new Term("myCode", t))
 					.collect(Collectors.toList());
 				if (codes.size() > 0) {
 					MultiPhraseQuery query = new MultiPhraseQuery();
@@ -564,19 +577,25 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
				 */
 				FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
-				jpaQuery.setMaxResults(1000);
+				int maxResult = 50000;
+				jpaQuery.setMaxResults(maxResult);

 				StopWatch sw = new StopWatch();
+				AtomicInteger count = new AtomicInteger(0);

-				@SuppressWarnings("unchecked")
-				List<TermConcept> result = jpaQuery.getResultList();
-
-				ourLog.info("Expansion completed in {}ms", sw.getMillis());
-
-				for (TermConcept nextConcept : result) {
-					addCodeIfNotAlreadyAdded(system, theExpansionComponent, theAddedCodes, nextConcept, theAdd);
+				for (Object next : jpaQuery.getResultList()) {
+					count.incrementAndGet();
+					TermConcept concept = (TermConcept) next;
+					addCodeIfNotAlreadyAdded(system, theExpansionComponent, theAddedCodes, concept, theAdd, theCodeCounter);
 				}

+				if (maxResult == count.get()) {
+					throw new InternalErrorException("Expansion fragment produced too many (>= " + maxResult + ") results");
+				}
+
+				ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), count, sw.getMillis());
+
 			} else {
 				// No codesystem matching the URL found in the database
@@ -585,8 +604,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 				throw new InvalidRequestException("Unknown code system: " + system);
 			}

-			if (include.getConcept().isEmpty() == false) {
-				for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
+			if (theInclude.getConcept().isEmpty() == false) {
+				for (ValueSet.ConceptReferenceComponent next : theInclude.getConcept()) {
 					String nextCode = next.getCode();
 					if (isNotBlank(nextCode) && !theAddedCodes.contains(nextCode)) {
 						CodeSystem.ConceptDefinitionComponent code = findCode(codeSystemFromContext.getConcept(), nextCode);
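Threading the AtomicInteger through addCodeIfNotAlreadyAdded and expandValueSetHandleIncludeOrExclude means the ValueSet returned by expandValueSet now carries an accurate expansion.total. A minimal caller-side sketch, not part of this commit, assuming an injected IHapiTerminologySvc; the code system URL is illustrative:

import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import org.hl7.fhir.r4.model.ValueSet;

public class CountedExpansionExample {
   private final IHapiTerminologySvc myTermSvc;

   public CountedExpansionExample(IHapiTerminologySvc theTermSvc) {
      myTermSvc = theTermSvc;
   }

   public int expandAndCount() {
      ValueSet toExpand = new ValueSet();
      // Hypothetical URL; it must belong to a code system the service supports
      toExpand.getCompose().addInclude().setSystem("http://example.com/my-code-system");

      ValueSet expanded = myTermSvc.expandValueSet(toExpand);

      // expansion.total is now populated from the include/exclude code counter
      return expanded.getExpansion().getTotal();
   }
}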

View File

@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.term;
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.term;
  */

 import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
+import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.CodeSystem;
 import org.hl7.fhir.r4.model.ConceptMap;

@@ -80,6 +81,11 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl {
 		return null;
 	}

+	@Override
+	public IBaseResource expandValueSet(IBaseResource theValueSetToExpand) {
+		throw new UnsupportedOperationException();
+	}
+
 	@Override
 	public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
 		throw new UnsupportedOperationException();

View File

@@ -166,6 +166,20 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implemen
 		}
 	}

+	@Override
+	public IBaseResource expandValueSet(IBaseResource theInput) {
+		ValueSet valueSetToExpand = (ValueSet) theInput;
+
+		try {
+			org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
+			valueSetToExpandR4 = VersionConvertor_30_40.convertValueSet(valueSetToExpand);
+			org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSet(valueSetToExpandR4);
+			return VersionConvertor_30_40.convertValueSet(expandedR4);
+		} catch (FHIRException e) {
+			throw new InternalErrorException(e);
+		}
+	}
+
 	@Override
 	public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
 		ValueSet vs = myValidationSupport.fetchResource(myContext, ValueSet.class, theValueSet);

View File

@@ -39,9 +39,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -134,6 +134,13 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements
 		return expandValueSetAndReturnVersionIndependentConcepts(vs);
 	}

+	@Override
+	public IBaseResource expandValueSet(IBaseResource theInput) {
+		ValueSet valueSetToExpand = (ValueSet) theInput;
+		return super.expandValueSet(valueSetToExpand);
+	}
+
 	@Override
 	public ValueSetExpansionComponent expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
 		ValueSet valueSetToExpand = new ValueSet();

View File

@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.term;
 import ca.uhn.fhir.jpa.entity.*;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
+import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.ConceptMap;
 import org.hl7.fhir.r4.model.ValueSet;

@@ -35,6 +36,11 @@ public interface IHapiTerminologySvc {
 	ValueSet expandValueSet(ValueSet theValueSetToExpand);

+	/**
+	 * Version independent
+	 */
+	IBaseResource expandValueSet(IBaseResource theValueSetToExpand);
+
 	List<VersionIndependentConcept> expandValueSet(String theValueSet);

 	TermConcept findCode(String theCodeSystem, String theCode);
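The new IBaseResource overload keeps callers version independent: the DSTU3 implementation (shown earlier) converts to R4 and back via VersionConvertor_30_40, the R4 implementation delegates straight to the base class, and DSTU2 throws UnsupportedOperationException. A minimal caller sketch, not part of this commit, assuming an injected IHapiTerminologySvc and a DSTU3 ValueSet:

import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import org.hl7.fhir.instance.model.api.IBaseResource;

public class VersionIndependentExpansionExample {
   private final IHapiTerminologySvc myTermSvc;

   public VersionIndependentExpansionExample(IHapiTerminologySvc theTermSvc) {
      myTermSvc = theTermSvc;
   }

   public org.hl7.fhir.dstu3.model.ValueSet expandDstu3(org.hl7.fhir.dstu3.model.ValueSet theValueSet) {
      // The cast selects the version-independent overload; any conversion to and
      // from R4 happens inside the service implementation.
      IBaseResource expanded = myTermSvc.expandValueSet((IBaseResource) theValueSet);
      return (org.hl7.fhir.dstu3.model.ValueSet) expanded;
   }
}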

View File

@@ -28,6 +28,48 @@ public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test {
 		TestUtil.clearAllStaticFieldsForUnitTest();
 	}

+	@Test
+	public void testValidateChangedQuestionnaire() {
+		Questionnaire q = new Questionnaire();
+		q.setId("QUEST");
+		q.addItem().setLinkId("A").setType(Questionnaire.QuestionnaireItemType.STRING).setRequired(true);
+		myQuestionnaireDao.update(q);
+
+		try {
+			QuestionnaireResponse qr = new QuestionnaireResponse();
+			qr.setStatus(QuestionnaireResponse.QuestionnaireResponseStatus.COMPLETED);
+			qr.getQuestionnaire().setReference("Questionnaire/QUEST");
+			qr.addItem().setLinkId("A").addAnswer().setValue(new StringType("AAA"));
+
+			MethodOutcome results = myQuestionnaireResponseDao.validate(qr, null, null, null, null, null, null);
+			ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(results.getOperationOutcome()));
+		} catch (PreconditionFailedException e) {
+			ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()));
+			fail(e.toString());
+		}
+
+		q = new Questionnaire();
+		q.setId("QUEST");
+		q.addItem().setLinkId("B").setType(Questionnaire.QuestionnaireItemType.STRING).setRequired(true);
+		myQuestionnaireDao.update(q);
+
+		try {
+			QuestionnaireResponse qr = new QuestionnaireResponse();
+			qr.setStatus(QuestionnaireResponse.QuestionnaireResponseStatus.COMPLETED);
+			qr.getQuestionnaire().setReference("Questionnaire/QUEST");
+			qr.addItem().setLinkId("A").addAnswer().setValue(new StringType("AAA"));
+
+			MethodOutcome results = myQuestionnaireResponseDao.validate(qr, null, null, null, null, null, null);
+			ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(results.getOperationOutcome()));
+			fail();
+		} catch (PreconditionFailedException e) {
+			ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()));
+			// good
+		}
+	}
+
 	@Test
 	public void testValidateStructureDefinition() throws Exception {
 		String input = IOUtils.toString(getClass().getResourceAsStream("/sd-david-dhtest7.json"), StandardCharsets.UTF_8);

View File

@@ -32,6 +32,8 @@ import org.springframework.transaction.annotation.Transactional;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;

 import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3TerminologyTest.URL_MY_CODE_SYSTEM;
 import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3TerminologyTest.URL_MY_VALUE_SET;
@@ -96,6 +98,39 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
 		createLocalVs(codeSystem);
 	}

+	public void createLoincSystemWithSomeCodes() {
+		runInTransaction(() -> {
+			CodeSystem codeSystem = new CodeSystem();
+			codeSystem.setUrl(CS_URL);
+			codeSystem.setContent(CodeSystemContentMode.NOTPRESENT);
+			IIdType id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualified();
+
+			ResourceTable table = myResourceTableDao.findById(id.getIdPartAsLong()).orElseThrow(IllegalArgumentException::new);
+
+			TermCodeSystemVersion cs = new TermCodeSystemVersion();
+			cs.setResource(table);
+
+			TermConcept code;
+			code = new TermConcept(cs, "50015-7");
+			code.addPropertyString("SYSTEM", "Bld/Bone mar^Donor");
+			cs.getConcepts().add(code);
+
+			code = new TermConcept(cs, "43343-3");
+			code.addPropertyString("SYSTEM", "Ser");
+			code.addPropertyString("HELLO", "12345-1");
+			cs.getConcepts().add(code);
+
+			code = new TermConcept(cs, "43343-4");
+			code.addPropertyString("SYSTEM", "Ser");
+			code.addPropertyString("HELLO", "12345-2");
+			cs.getConcepts().add(code);
+
+			myTermSvc.storeNewCodeSystemVersion(table.getId(), CS_URL, "SYSTEM NAME", cs);
+		});
+	}
+
 	private void createLocalVs(CodeSystem codeSystem) {
 		myLocalVs = new ValueSet();
 		myLocalVs.setUrl(URL_MY_VALUE_SET);
@@ -132,6 +167,71 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
 		myLocalValueSetId = myValueSetDao.create(myLocalVs, mySrd).getId().toUnqualifiedVersionless();
 	}

+	@Test
+	public void testExpandValueSetPropertySearchWithRegexExcludeUsingOr() {
+		createLoincSystemWithSomeCodes();
+
+		List<String> codes;
+		ValueSet vs;
+		ValueSet outcome;
+		ValueSet.ConceptSetComponent exclude;
+
+		// Include
+		vs = new ValueSet();
+		vs.getCompose()
+			.addInclude()
+			.setSystem(CS_URL);
+
+		exclude = vs.getCompose().addExclude();
+		exclude.setSystem(CS_URL);
+		exclude
+			.addFilter()
+			.setProperty("HELLO")
+			.setOp(ValueSet.FilterOperator.REGEX)
+			.setValue("12345-1|12345-2");
+
+		IIdType vsId = ourClient.create().resource(vs).execute().getId();
+		outcome = (ValueSet) ourClient.operation().onInstance(vsId).named("expand").withNoParameters(Parameters.class).execute().getParameter().get(0).getResource();
+		codes = toCodesContains(outcome.getExpansion().getContains());
+		ourLog.info("** Got codes: {}", codes);
+		assertThat(codes, containsInAnyOrder("50015-7"));
+
+		assertEquals(1, outcome.getCompose().getInclude().size());
+		assertEquals(1, outcome.getCompose().getExclude().size());
+		assertEquals(1, outcome.getExpansion().getTotal());
+	}
+
+	@Test
+	public void testExpandValueSetPropertySearchWithRegexExcludeNoFilter() {
+		createLoincSystemWithSomeCodes();
+
+		List<String> codes;
+		ValueSet vs;
+		ValueSet outcome;
+		ValueSet.ConceptSetComponent exclude;
+
+		// Include
+		vs = new ValueSet();
+		vs.getCompose()
+			.addInclude()
+			.setSystem(CS_URL);
+
+		exclude = vs.getCompose().addExclude();
+		exclude.setSystem(CS_URL);
+
+		IIdType vsId = ourClient.create().resource(vs).execute().getId();
+		outcome = (ValueSet) ourClient.operation().onInstance(vsId).named("expand").withNoParameters(Parameters.class).execute().getParameter().get(0).getResource();
+		codes = toCodesContains(outcome.getExpansion().getContains());
+		assertThat(codes, empty());
+	}
+
 	@Test
 	public void testExpandById() throws IOException {
 		//@formatter:off
@@ -611,4 +711,15 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
 		return codeSystem;
 	}

+	public static List<String> toCodesContains(List<ValueSet.ValueSetExpansionContainsComponent> theContains) {
+		List<String> retVal = new ArrayList<>();
+
+		for (ValueSet.ValueSetExpansionContainsComponent next : theContains) {
+			retVal.add(next.getCode());
+		}
+
+		return retVal;
+	}
+
 }

View File

@@ -591,7 +591,7 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {
 		}
 	}

-	private List<String> toCodesContains(List<ValueSet.ValueSetExpansionContainsComponent> theContains) {
+	public static List<String> toCodesContains(List<ValueSet.ValueSetExpansionContainsComponent> theContains) {
 		List<String> retVal = new ArrayList<>();

 		for (ValueSet.ValueSetExpansionContainsComponent next : theContains) {

View File

@@ -168,6 +168,14 @@
 		</dependency>
 		-->

+		<!-- Used by the validator -->
+		<dependency>
+			<groupId>com.github.ben-manes.caffeine</groupId>
+			<artifactId>caffeine</artifactId>
+			<optional>true</optional>
+		</dependency>
+
 		<!-- Testing -->
 		<dependency>
 			<groupId>org.xmlunit</groupId>

View File

@@ -4,6 +4,8 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.util.CoverageIgnore;
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.dstu3.context.IWorkerContext;
 import org.hl7.fhir.dstu3.formats.IParser;

@@ -22,13 +24,15 @@ import org.hl7.fhir.exceptions.FHIRException;
 import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;

 import java.util.*;
+import java.util.concurrent.TimeUnit;

 import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;

 public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander, ValueSetExpanderFactory {
 	private final FhirContext myCtx;
-	private Map<String, Resource> myFetchedResourceCache = new HashMap<String, Resource>();
+	private final Cache<String, Resource> myFetchedResourceCache;
 	private IValidationSupport myValidationSupport;
 	private ExpansionProfile myExpansionProfile;

@@ -37,6 +41,7 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
 		Validate.notNull(theValidationSupport, "theValidationSupport must not be null");
 		myCtx = theCtx;
 		myValidationSupport = theValidationSupport;
+		myFetchedResourceCache = Caffeine.newBuilder().expireAfterWrite(10, TimeUnit.SECONDS).build();
 	}

 	@Override

@@ -92,13 +97,9 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
 			return null;
 		} else {
 			@SuppressWarnings("unchecked")
-			T retVal = (T) myFetchedResourceCache.get(theUri);
-			if (retVal == null) {
-				retVal = myValidationSupport.fetchResource(myCtx, theClass, theUri);
-				if (retVal != null) {
-					myFetchedResourceCache.put(theUri, retVal);
-				}
-			}
+			T retVal = (T) myFetchedResourceCache.get(theUri, t->{
+				return myValidationSupport.fetchResource(myCtx, theClass, theUri);
+			});
 			return retVal;
 		}
 	}
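The cache change above swaps the unbounded HashMap for a time-bounded Caffeine cache, and fetchResource misses now load through the cache's mapping function. A minimal self-contained sketch of that pattern, not part of this commit, using illustrative key/value types:

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

import java.util.concurrent.TimeUnit;

public class ExpiringFetchCacheExample {
   // Entries expire 10 seconds after being written, so stale conformance
   // resources age out instead of accumulating for the life of the context.
   private final Cache<String, String> myCache = Caffeine.newBuilder()
      .expireAfterWrite(10, TimeUnit.SECONDS)
      .build();

   public String fetch(String theUri) {
      // get(key, mappingFunction) computes and stores the value on a miss and
      // returns the cached value on a hit; a null result is not cached.
      return myCache.get(theUri, uri -> expensiveLookup(uri));
   }

   private String expensiveLookup(String theUri) {
      // Stand-in for IValidationSupport.fetchResource(...)
      return "resource-for-" + theUri;
   }
}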

View File

@@ -50,6 +50,13 @@
 			<optional>true</optional>
 		</dependency>

+		<!-- Used by the validator -->
+		<dependency>
+			<groupId>com.github.ben-manes.caffeine</groupId>
+			<artifactId>caffeine</artifactId>
+			<optional>true</optional>
+		</dependency>
+
 		<!--
 		Test dependencies on other optional parts of HAPI
 		-->

View File

@@ -1,10 +1,11 @@
 package org.hl7.fhir.r4.hapi.ctx;

 import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.util.CoverageIgnore;
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
 import org.apache.commons.lang3.Validate;
 import org.fhir.ucum.UcumService;
 import org.hl7.fhir.exceptions.FHIRException;
@@ -28,13 +29,14 @@ import org.hl7.fhir.utilities.TranslationServices;
 import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;

 import java.util.*;
+import java.util.concurrent.TimeUnit;

 import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;

 public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander, ValueSetExpanderFactory {
 	private final FhirContext myCtx;
-	private Map<String, Resource> myFetchedResourceCache = new HashMap<String, Resource>();
+	private final Cache<String, Resource> myFetchedResourceCache;
 	private IValidationSupport myValidationSupport;
 	private ExpansionProfile myExpansionProfile;

@@ -43,6 +45,7 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
 		Validate.notNull(theValidationSupport, "theValidationSupport must not be null");
 		myCtx = theCtx;
 		myValidationSupport = theValidationSupport;
+		myFetchedResourceCache = Caffeine.newBuilder().expireAfterWrite(10, TimeUnit.SECONDS).build();
 	}

 	@Override
@@ -206,9 +209,9 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
 		ValueSetExpansionOutcome expandedValueSet = null;

 		/*
 		 * The following valueset is a special case, since the BCP codesystem is very difficult to expand
 		 */
 		if (theVs != null && "http://hl7.org/fhir/ValueSet/languages".equals(theVs.getId())) {
 			ValueSet expansion = new ValueSet();
 			for (ConceptSetComponent nextInclude : theVs.getCompose().getInclude()) {
@@ -338,13 +341,9 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
 			return null;
 		} else {
 			@SuppressWarnings("unchecked")
-			T retVal = (T) myFetchedResourceCache.get(theUri);
-			if (retVal == null) {
-				retVal = myValidationSupport.fetchResource(myCtx, theClass, theUri);
-				if (retVal != null) {
-					myFetchedResourceCache.put(theUri, (Resource) retVal);
-				}
-			}
+			T retVal = (T) myFetchedResourceCache.get(theUri, t -> {
+				return myValidationSupport.fetchResource(myCtx, theClass, theUri);
+			});
 			return retVal;
 		}
 	}