Merge branch 'master' into jpa_migration

This commit is contained in:
James Agnew 2018-09-02 06:05:11 +08:00
commit 0f7c4219cb
34 changed files with 678 additions and 245 deletions

View File

@ -21,12 +21,7 @@ package ca.uhn.fhir.rest.api;
*/ */
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.util.Arrays; import java.util.*;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
public class Constants { public class Constants {
@ -197,6 +192,10 @@ public class Constants {
public static final String POWERED_BY_HEADER = "X-Powered-By"; public static final String POWERED_BY_HEADER = "X-Powered-By";
public static final Charset CHARSET_US_ASCII; public static final Charset CHARSET_US_ASCII;
public static final String PARAM_PAGEID = "_pageId"; public static final String PARAM_PAGEID = "_pageId";
/**
* This is provided for testing only! Use with caution as this property may change.
*/
public static final String TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS = "TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS";
static { static {
CHARSET_UTF8 = Charset.forName(CHARSET_NAME_UTF8); CHARSET_UTF8 = Charset.forName(CHARSET_NAME_UTF8);

View File

@ -87,7 +87,9 @@ public class FhirResourceDaoConceptMapDstu3 extends FhirResourceDaoDstu3<Concept
if (targetsToReturn.add(target)) { if (targetsToReturn.add(target)) {
translationMatch = new TranslationMatch(); translationMatch = new TranslationMatch();
translationMatch.setEquivalence(new CodeType(target.getEquivalence().toCode())); if (target.getEquivalence() != null) {
translationMatch.setEquivalence(new CodeType(target.getEquivalence().toCode()));
}
translationMatch.setConcept( translationMatch.setConcept(
new Coding() new Coding()

View File

@ -73,8 +73,10 @@ public class FhirResourceDaoDstu3<T extends IAnyResource> extends BaseHapiFhirRe
@Override @Override
public MethodOutcome validate(T theResource, IIdType theId, String theRawResource, EncodingEnum theEncoding, ValidationModeEnum theMode, String theProfile, RequestDetails theRequestDetails) { public MethodOutcome validate(T theResource, IIdType theId, String theRawResource, EncodingEnum theEncoding, ValidationModeEnum theMode, String theProfile, RequestDetails theRequestDetails) {
ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails, theResource, null, theId); if (theRequestDetails != null) {
notifyInterceptors(RestOperationTypeEnum.VALIDATE, requestDetails); ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails, theResource, null, theId);
notifyInterceptors(RestOperationTypeEnum.VALIDATE, requestDetails);
}
if (theMode == ValidationModeEnum.DELETE) { if (theMode == ValidationModeEnum.DELETE) {
if (theId == null || theId.hasIdPart() == false) { if (theId == null || theId.hasIdPart() == false) {

View File

@ -20,19 +20,22 @@ package ca.uhn.fhir.jpa.dao.dstu3;
* #L% * #L%
*/ */
import static org.apache.commons.lang3.StringUtils.isBlank; import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem.LookupCodeResult;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
import java.util.Collections; import ca.uhn.fhir.jpa.util.LogicUtil;
import java.util.List; import ca.uhn.fhir.rest.api.server.RequestDetails;
import java.util.stream.Collectors; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.ElementUtil;
import org.apache.commons.codec.binary.StringUtils; import org.apache.commons.codec.binary.StringUtils;
import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext; import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport; import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.Enumerations.PublicationStatus; import org.hl7.fhir.dstu3.model.Enumerations.PublicationStatus;
import org.hl7.fhir.dstu3.model.ValueSet.*; import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetFilterComponent;
import org.hl7.fhir.dstu3.model.ValueSet.FilterOperator;
import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionContainsComponent;
import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ValueSetExpansionOutcome; import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ValueSetExpansionOutcome;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.instance.model.api.IPrimitiveType;
@ -41,20 +44,18 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem; import java.util.Collections;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoCodeSystem.LookupCodeResult; import java.util.List;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.util.LogicUtil; import static org.apache.commons.lang3.StringUtils.isBlank;
import ca.uhn.fhir.rest.api.server.RequestDetails; import static org.apache.commons.lang3.StringUtils.isNotBlank;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.ElementUtil;
public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet> implements IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> { public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet> implements IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoValueSetDstu3.class);
@Autowired @Autowired
@Qualifier("myJpaValidationSupportChainDstu3") @Qualifier("myJpaValidationSupportChainDstu3")
private IValidationSupport myValidationSupport; private IValidationSupport myValidationSupport;
@Autowired @Autowired
private IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> myCodeSystemDao; private IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> myCodeSystemDao;
@ -69,21 +70,32 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
validateIncludes("include", theSource.getCompose().getInclude()); validateIncludes("include", theSource.getCompose().getInclude());
validateIncludes("exclude", theSource.getCompose().getExclude()); validateIncludes("exclude", theSource.getCompose().getExclude());
/*
* If all of the code systems are supported by the HAPI FHIR terminology service, let's
* use that as it's more efficient.
*/
boolean allSystemsAreSuppportedByTerminologyService = true;
for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
if (!myTerminologySvc.supportsSystem(next.getSystem())) {
allSystemsAreSuppportedByTerminologyService = false;
}
}
for (ConceptSetComponent next : theSource.getCompose().getExclude()) {
if (!myTerminologySvc.supportsSystem(next.getSystem())) {
allSystemsAreSuppportedByTerminologyService = false;
}
}
if (allSystemsAreSuppportedByTerminologyService) {
return (ValueSet) myTerminologySvc.expandValueSet(theSource);
}
HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport); HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport);
ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null); ValueSetExpansionOutcome outcome = workerContext.expand(theSource, null);
ValueSet retVal = outcome.getValueset(); ValueSet retVal = outcome.getValueset();
retVal.setStatus(PublicationStatus.ACTIVE); retVal.setStatus(PublicationStatus.ACTIVE);
return retVal; return retVal;
// ValueSetExpansionComponent expansion = outcome.getValueset().getExpansion();
//
// ValueSet retVal = new ValueSet();
// retVal.getMeta().setLastUpdated(new Date());
// retVal.setExpansion(expansion);
// return retVal;
} }
private void validateIncludes(String name, List<ConceptSetComponent> listToValidate) { private void validateIncludes(String name, List<ConceptSetComponent> listToValidate) {
@ -185,8 +197,8 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
@Override @Override
public ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCode(IPrimitiveType<String> theValueSetIdentifier, IIdType theId, IPrimitiveType<String> theCode, public ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCode(IPrimitiveType<String> theValueSetIdentifier, IIdType theId, IPrimitiveType<String> theCode,
IPrimitiveType<String> theSystem, IPrimitiveType<String> theDisplay, Coding theCoding, IPrimitiveType<String> theSystem, IPrimitiveType<String> theDisplay, Coding theCoding,
CodeableConcept theCodeableConcept, RequestDetails theRequestDetails) { CodeableConcept theCodeableConcept, RequestDetails theRequestDetails) {
List<IIdType> valueSetIds = Collections.emptyList(); List<IIdType> valueSetIds = Collections.emptyList();
@ -242,15 +254,12 @@ public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet>
} }
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoValueSetDstu3.class);
private String toStringOrNull(IPrimitiveType<String> thePrimitive) { private String toStringOrNull(IPrimitiveType<String> thePrimitive) {
return thePrimitive != null ? thePrimitive.getValue() : null; return thePrimitive != null ? thePrimitive.getValue() : null;
} }
private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List<ValueSetExpansionContainsComponent> contains, String theSystem, String theCode, private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List<ValueSetExpansionContainsComponent> contains, String theSystem, String theCode,
Coding theCoding, CodeableConcept theCodeableConcept) { Coding theCoding, CodeableConcept theCodeableConcept) {
for (ValueSetExpansionContainsComponent nextCode : contains) { for (ValueSetExpansionContainsComponent nextCode : contains) {
ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept); ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept);
if (result != null) { if (result != null) {

View File

@ -83,7 +83,9 @@ public class FhirResourceDaoConceptMapR4 extends FhirResourceDaoR4<ConceptMap> i
if (targetsToReturn.add(target)) { if (targetsToReturn.add(target)) {
translationMatch = new TranslationMatch(); translationMatch = new TranslationMatch();
translationMatch.setEquivalence(new CodeType(target.getEquivalence().toCode())); if (target.getEquivalence() != null) {
translationMatch.setEquivalence(new CodeType(target.getEquivalence().toCode()));
}
translationMatch.setConcept( translationMatch.setConcept(
new Coding() new Coding()

View File

@ -63,8 +63,25 @@ public class FhirResourceDaoValueSetR4 extends FhirResourceDaoR4<ValueSet> imple
private ValueSet doExpand(ValueSet theSource) { private ValueSet doExpand(ValueSet theSource) {
validateIncludes("include", theSource.getCompose().getInclude()); /*
validateIncludes("exclude", theSource.getCompose().getExclude()); * If all of the code systems are supported by the HAPI FHIR terminology service, let's
* use that as it's more efficient.
*/
boolean allSystemsAreSuppportedByTerminologyService = true;
for (ConceptSetComponent next : theSource.getCompose().getInclude()) {
if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) {
allSystemsAreSuppportedByTerminologyService = false;
}
}
for (ConceptSetComponent next : theSource.getCompose().getExclude()) {
if (!isBlank(next.getSystem()) && !myTerminologySvc.supportsSystem(next.getSystem())) {
allSystemsAreSuppportedByTerminologyService = false;
}
}
if (allSystemsAreSuppportedByTerminologyService) {
return myTerminologySvc.expandValueSet(theSource);
}
HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport); HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport);

View File

@ -32,7 +32,7 @@ import javax.persistence.*;
}) })
public class ResourceIndexedCompositeStringUnique implements Comparable<ResourceIndexedCompositeStringUnique> { public class ResourceIndexedCompositeStringUnique implements Comparable<ResourceIndexedCompositeStringUnique> {
public static final int MAX_STRING_LENGTH = 150; public static final int MAX_STRING_LENGTH = 200;
public static final String IDX_IDXCMPSTRUNIQ_STRING = "IDX_IDXCMPSTRUNIQ_STRING"; public static final String IDX_IDXCMPSTRUNIQ_STRING = "IDX_IDXCMPSTRUNIQ_STRING";
public static final String IDX_IDXCMPSTRUNIQ_RESOURCE = "IDX_IDXCMPSTRUNIQ_RESOURCE"; public static final String IDX_IDXCMPSTRUNIQ_RESOURCE = "IDX_IDXCMPSTRUNIQ_RESOURCE";

View File

@ -49,6 +49,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
@Index(name = "IDX_CONCEPT_UPDATED", columnList = "CONCEPT_UPDATED") @Index(name = "IDX_CONCEPT_UPDATED", columnList = "CONCEPT_UPDATED")
}) })
public class TermConcept implements Serializable { public class TermConcept implements Serializable {
public static final int CODE_LENGTH = 500;
protected static final int MAX_DESC_LENGTH = 400; protected static final int MAX_DESC_LENGTH = 400;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermConcept.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermConcept.class);
@ -57,7 +58,7 @@ public class TermConcept implements Serializable {
@OneToMany(fetch = FetchType.LAZY, mappedBy = "myParent", cascade = {}) @OneToMany(fetch = FetchType.LAZY, mappedBy = "myParent", cascade = {})
private Collection<TermConceptParentChildLink> myChildren; private Collection<TermConceptParentChildLink> myChildren;
@Column(name = "CODE", length = 100, nullable = false) @Column(name = "CODE", length = CODE_LENGTH, nullable = false)
@Fields({@Field(name = "myCode", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "exactAnalyzer")),}) @Fields({@Field(name = "myCode", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "exactAnalyzer")),})
private String myCode; private String myCode;
@Temporal(TemporalType.TIMESTAMP) @Temporal(TemporalType.TIMESTAMP)

View File

@ -45,7 +45,7 @@ public class TermConceptMapGroupElement implements Serializable {
@JoinColumn(name = "CONCEPT_MAP_GROUP_PID", nullable = false, referencedColumnName = "PID", foreignKey=@ForeignKey(name="FK_TCMGELEMENT_GROUP")) @JoinColumn(name = "CONCEPT_MAP_GROUP_PID", nullable = false, referencedColumnName = "PID", foreignKey=@ForeignKey(name="FK_TCMGELEMENT_GROUP"))
private TermConceptMapGroup myConceptMapGroup; private TermConceptMapGroup myConceptMapGroup;
@Column(name = "SOURCE_CODE", nullable = false, length = 100) @Column(name = "SOURCE_CODE", nullable = false, length = TermConcept.CODE_LENGTH)
private String myCode; private String myCode;
@Column(name = "SOURCE_DISPLAY", length = TermConcept.MAX_DESC_LENGTH) @Column(name = "SOURCE_DISPLAY", length = TermConcept.MAX_DESC_LENGTH)

View File

@ -44,7 +44,7 @@ public class TermConceptMapGroupElementTarget implements Serializable {
@JoinColumn(name = "CONCEPT_MAP_GRP_ELM_PID", nullable = false, referencedColumnName = "PID", foreignKey=@ForeignKey(name="FK_TCMGETARGET_ELEMENT")) @JoinColumn(name = "CONCEPT_MAP_GRP_ELM_PID", nullable = false, referencedColumnName = "PID", foreignKey=@ForeignKey(name="FK_TCMGETARGET_ELEMENT"))
private TermConceptMapGroupElement myConceptMapGroupElement; private TermConceptMapGroupElement myConceptMapGroupElement;
@Column(name = "TARGET_CODE", nullable = false, length = 50) @Column(name = "TARGET_CODE", nullable = false, length = TermConcept.CODE_LENGTH)
private String myCode; private String myCode;
@Column(name = "TARGET_DISPLAY", length = TermConcept.MAX_DESC_LENGTH) @Column(name = "TARGET_DISPLAY", length = TermConcept.MAX_DESC_LENGTH)

View File

@ -52,14 +52,9 @@ import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery; import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.BooleanJunction; import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder; import org.hibernate.search.query.dsl.QueryBuilder;
import org.hibernate.search.query.dsl.TermMatchingContext;
import org.hibernate.search.query.dsl.TermTermination;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import org.springframework.beans.BeansException; import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
@ -84,6 +79,7 @@ import javax.persistence.TypedQuery;
import javax.persistence.criteria.*; import javax.persistence.criteria.*;
import java.util.*; import java.util.*;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier; import java.util.function.Supplier;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@ -140,14 +136,16 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
private ApplicationContext myApplicationContext; private ApplicationContext myApplicationContext;
/** /**
* @param theAdd If true, add the code. If false, remove the code. * @param theAdd If true, add the code. If false, remove the code.
* @param theCodeCounter
*/ */
private void addCodeIfNotAlreadyAdded(String theCodeSystem, ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, TermConcept theConcept, boolean theAdd) { private void addCodeIfNotAlreadyAdded(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, TermConcept theConcept, boolean theAdd, AtomicInteger theCodeCounter) {
String code = theConcept.getCode(); String code = theConcept.getCode();
if (theAdd && theAddedCodes.add(code)) { if (theAdd && theAddedCodes.add(code)) {
String codeSystem = theConcept.getCodeSystemVersion().getCodeSystem().getCodeSystemUri();
ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains(); ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains();
contains.setCode(code); contains.setCode(code);
contains.setSystem(theCodeSystem); contains.setSystem(codeSystem);
contains.setDisplay(theConcept.getDisplay()); contains.setDisplay(theConcept.getDisplay());
for (TermConceptDesignation nextDesignation : theConcept.getDesignations()) { for (TermConceptDesignation nextDesignation : theConcept.getDesignations()) {
contains contains
@ -158,10 +156,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
.setCode(nextDesignation.getUseCode()) .setCode(nextDesignation.getUseCode())
.setDisplay(nextDesignation.getUseDisplay()); .setDisplay(nextDesignation.getUseDisplay());
} }
theCodeCounter.incrementAndGet();
} }
if (!theAdd && theAddedCodes.remove(code)) { if (!theAdd && theAddedCodes.remove(code)) {
removeCodeFromExpansion(theCodeSystem, code, theExpansionComponent); String codeSystem = theConcept.getCodeSystemVersion().getCodeSystem().getCodeSystemUri();
removeCodeFromExpansion(codeSystem, code, theExpansionComponent);
theCodeCounter.decrementAndGet();
} }
} }
@ -412,22 +414,33 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
@Override @Override
@Transactional(propagation = Propagation.REQUIRED) @Transactional(propagation = Propagation.REQUIRED)
public ValueSet expandValueSet(ValueSet theValueSetToExpand) { public ValueSet expandValueSet(ValueSet theValueSetToExpand) {
ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent(); ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent();
expansionComponent.setIdentifier(UUID.randomUUID().toString());
expansionComponent.setTimestamp(new Date());
Set<String> addedCodes = new HashSet<>(); Set<String> addedCodes = new HashSet<>();
AtomicInteger codeCounter = new AtomicInteger(0);
// Handle includes // Handle includes
ourLog.debug("Handling includes");
for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) { for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) {
boolean add = true; boolean add = true;
expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add); expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add, codeCounter);
} }
// Handle excludes // Handle excludes
ourLog.debug("Handling excludes");
for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getExclude()) { for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getExclude()) {
boolean add = false; boolean add = false;
expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add); expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add, codeCounter);
} }
expansionComponent.setTotal(codeCounter.get());
ValueSet valueSet = new ValueSet(); ValueSet valueSet = new ValueSet();
valueSet.setStatus(Enumerations.PublicationStatus.ACTIVE);
valueSet.setCompose(theValueSetToExpand.getCompose());
valueSet.setExpansion(expansionComponent); valueSet.setExpansion(expansionComponent);
return valueSet; return valueSet;
} }
@ -445,10 +458,13 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return retVal; return retVal;
} }
public void expandValueSetHandleIncludeOrExclude(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, ValueSet.ConceptSetComponent include, boolean theAdd) { public void expandValueSetHandleIncludeOrExclude(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, ValueSet.ConceptSetComponent theInclude, boolean theAdd, AtomicInteger theCodeCounter) {
String system = include.getSystem(); String system = theInclude.getSystem();
if (isNotBlank(system)) { boolean hasSystem = isNotBlank(system);
ourLog.info("Starting expansion around code system: {}", system); boolean hasValueSet = theInclude.getValueSet().size() > 0;
if (hasSystem) {
ourLog.info("Starting {} expansion around code system: {}", (theAdd ? "inclusion" : "exclusion"), system);
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system); TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
if (cs != null) { if (cs != null) {
@ -463,9 +479,9 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
* Filters * Filters
*/ */
if (include.getFilter().size() > 0) { if (theInclude.getFilter().size() > 0) {
for (ValueSet.ConceptSetFilterComponent nextFilter : include.getFilter()) { for (ValueSet.ConceptSetFilterComponent nextFilter : theInclude.getFilter()) {
if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) { if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
continue; continue;
} }
@ -542,13 +558,13 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
* Include Concepts * Include Concepts
*/ */
List<Term> codes = include List<Term> codes = theInclude
.getConcept() .getConcept()
.stream() .stream()
.filter(Objects::nonNull) .filter(Objects::nonNull)
.map(ValueSet.ConceptReferenceComponent::getCode) .map(ValueSet.ConceptReferenceComponent::getCode)
.filter(StringUtils::isNotBlank) .filter(StringUtils::isNotBlank)
.map(t->new Term("myCode", t)) .map(t -> new Term("myCode", t))
.collect(Collectors.toList()); .collect(Collectors.toList());
if (codes.size() > 0) { if (codes.size() > 0) {
MultiPhraseQuery query = new MultiPhraseQuery(); MultiPhraseQuery query = new MultiPhraseQuery();
@ -564,19 +580,25 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
*/ */
FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class); FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
jpaQuery.setMaxResults(1000); int maxResult = 50000;
jpaQuery.setMaxResults(maxResult);
StopWatch sw = new StopWatch(); StopWatch sw = new StopWatch();
AtomicInteger count = new AtomicInteger(0);
@SuppressWarnings("unchecked") for (Object next : jpaQuery.getResultList()) {
List<TermConcept> result = jpaQuery.getResultList(); count.incrementAndGet();
TermConcept concept = (TermConcept) next;
ourLog.info("Expansion completed in {}ms", sw.getMillis()); addCodeIfNotAlreadyAdded(theExpansionComponent, theAddedCodes, concept, theAdd, theCodeCounter);
for (TermConcept nextConcept : result) {
addCodeIfNotAlreadyAdded(system, theExpansionComponent, theAddedCodes, nextConcept, theAdd);
} }
if (maxResult == count.get()) {
throw new InternalErrorException("Expansion fragment produced too many (>= " + maxResult + ") results");
}
ourLog.info("Expansion for {} produced {} results in {}ms", (theAdd ? "inclusion" : "exclusion"), count, sw.getMillis());
} else { } else {
// No codesystem matching the URL found in the database // No codesystem matching the URL found in the database
@ -585,8 +607,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
throw new InvalidRequestException("Unknown code system: " + system); throw new InvalidRequestException("Unknown code system: " + system);
} }
if (include.getConcept().isEmpty() == false) { if (theInclude.getConcept().isEmpty() == false) {
for (ValueSet.ConceptReferenceComponent next : include.getConcept()) { for (ValueSet.ConceptReferenceComponent next : theInclude.getConcept()) {
String nextCode = next.getCode(); String nextCode = next.getCode();
if (isNotBlank(nextCode) && !theAddedCodes.contains(nextCode)) { if (isNotBlank(nextCode) && !theAddedCodes.contains(nextCode)) {
CodeSystem.ConceptDefinitionComponent code = findCode(codeSystemFromContext.getConcept(), nextCode); CodeSystem.ConceptDefinitionComponent code = findCode(codeSystemFromContext.getConcept(), nextCode);
@ -609,6 +631,25 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
} }
} }
} else if (hasValueSet) {
for (CanonicalType nextValueSet : theInclude.getValueSet()) {
ourLog.info("Starting {} expansion around ValueSet URI: {}", (theAdd ? "inclusion" : "exclusion"), nextValueSet.getValueAsString());
List<VersionIndependentConcept> expanded = expandValueSet(nextValueSet.getValueAsString());
for (VersionIndependentConcept nextConcept : expanded) {
if (theAdd) {
TermCodeSystem codeSystem = myCodeSystemDao.findByCodeSystemUri(nextConcept.getSystem());
TermConcept concept = myConceptDao.findByCodeSystemAndCode(codeSystem.getCurrentVersion(), nextConcept.getCode());
addCodeIfNotAlreadyAdded(theExpansionComponent, theAddedCodes, concept, theAdd, theCodeCounter);
}
if (!theAdd && theAddedCodes.remove(nextConcept.getCode())) {
removeCodeFromExpansion(nextConcept.getSystem(), nextConcept.getCode(), theExpansionComponent);
}
}
}
} else {
throw new InvalidRequestException("ValueSet contains " + (theAdd ? "include" : "exclude") + " criteria with no system defined");
} }
} }
@ -640,7 +681,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
if (theCode.equals(next.getCode())) { if (theCode.equals(next.getCode())) {
return next; return next;
} }
findCode(next.getConcept(), theCode); CodeSystem.ConceptDefinitionComponent val = findCode(next.getConcept(), theCode);
if (val != null) {
return val;
}
} }
return null; return null;
} }

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.term;
*/ */
import org.hl7.fhir.instance.hapi.validation.IValidationSupport; import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.ConceptMap;
@ -80,6 +81,11 @@ public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvcImpl {
return null; return null;
} }
@Override
public IBaseResource expandValueSet(IBaseResource theValueSetToExpand) {
throw new UnsupportedOperationException();
}
@Override @Override
public List<VersionIndependentConcept> expandValueSet(String theValueSet) { public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();

View File

@ -166,6 +166,20 @@ public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvcImpl implemen
} }
} }
@Override
public IBaseResource expandValueSet(IBaseResource theInput) {
ValueSet valueSetToExpand = (ValueSet) theInput;
try {
org.hl7.fhir.r4.model.ValueSet valueSetToExpandR4;
valueSetToExpandR4 = VersionConvertor_30_40.convertValueSet(valueSetToExpand);
org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSet(valueSetToExpandR4);
return VersionConvertor_30_40.convertValueSet(expandedR4);
} catch (FHIRException e) {
throw new InternalErrorException(e);
}
}
@Override @Override
public List<VersionIndependentConcept> expandValueSet(String theValueSet) { public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
ValueSet vs = myValidationSupport.fetchResource(myContext, ValueSet.class, theValueSet); ValueSet vs = myValidationSupport.fetchResource(myContext, ValueSet.class, theValueSet);

View File

@ -134,6 +134,13 @@ public class HapiTerminologySvcR4 extends BaseHapiTerminologySvcImpl implements
return expandValueSetAndReturnVersionIndependentConcepts(vs); return expandValueSetAndReturnVersionIndependentConcepts(vs);
} }
@Override
public IBaseResource expandValueSet(IBaseResource theInput) {
ValueSet valueSetToExpand = (ValueSet) theInput;
return super.expandValueSet(valueSetToExpand);
}
@Override @Override
public ValueSetExpansionComponent expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) { public ValueSetExpansionComponent expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
ValueSet valueSetToExpand = new ValueSet(); ValueSet valueSetToExpand = new ValueSet();

View File

@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.jpa.entity.*; import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet; import org.hl7.fhir.r4.model.ValueSet;
@ -35,6 +36,11 @@ public interface IHapiTerminologySvc {
ValueSet expandValueSet(ValueSet theValueSetToExpand); ValueSet expandValueSet(ValueSet theValueSetToExpand);
/**
* Version independent
*/
IBaseResource expandValueSet(IBaseResource theValueSetToExpand);
List<VersionIndependentConcept> expandValueSet(String theValueSet); List<VersionIndependentConcept> expandValueSet(String theValueSet);
TermConcept findCode(String theCodeSystem, String theCode); TermConcept findCode(String theCodeSystem, String theCode);

View File

@ -63,9 +63,6 @@ public class LoincAnswerListHandler extends BaseLoincHandler {
String extCodeSystem = trim(theRecord.get("ExtCodeSystem")); String extCodeSystem = trim(theRecord.get("ExtCodeSystem"));
String extCodeSystemVersion = trim(theRecord.get("ExtCodeSystemVersion")); String extCodeSystemVersion = trim(theRecord.get("ExtCodeSystemVersion"));
if (isBlank(answerString)) {
return;
}
// Answer list code // Answer list code
if (!myCode2Concept.containsKey(answerListId)) { if (!myCode2Concept.containsKey(answerListId)) {
@ -74,16 +71,6 @@ public class LoincAnswerListHandler extends BaseLoincHandler {
myCode2Concept.put(answerListId, concept); myCode2Concept.put(answerListId, concept);
} }
// Answer code
if (!myCode2Concept.containsKey(answerString)) {
TermConcept concept = new TermConcept(myCodeSystemVersion, answerString);
concept.setDisplay(displayText);
if (isNotBlank(sequenceNumber) && sequenceNumber.matches("^[0-9]$")) {
concept.setSequence(Integer.parseInt(sequenceNumber));
}
myCode2Concept.put(answerString, concept);
}
// Answer list ValueSet // Answer list ValueSet
ValueSet vs = getValueSet(answerListId, "http://loinc.org/vs/" + answerListId, answerListName, "answerlist.version"); ValueSet vs = getValueSet(answerListId, "http://loinc.org/vs/" + answerListId, answerListName, "answerlist.version");
if (vs.getIdentifier().isEmpty()) { if (vs.getIdentifier().isEmpty()) {
@ -92,13 +79,28 @@ public class LoincAnswerListHandler extends BaseLoincHandler {
.setValue("urn:oid:" + answerListOid); .setValue("urn:oid:" + answerListOid);
} }
vs if (isNotBlank(answerString)) {
.getCompose()
.getIncludeFirstRep() // Answer code
.setSystem(IHapiTerminologyLoaderSvc.LOINC_URI) if (!myCode2Concept.containsKey(answerString)) {
.addConcept() TermConcept concept = new TermConcept(myCodeSystemVersion, answerString);
.setCode(answerString) concept.setDisplay(displayText);
.setDisplay(displayText); if (isNotBlank(sequenceNumber) && sequenceNumber.matches("^[0-9]$")) {
concept.setSequence(Integer.parseInt(sequenceNumber));
}
myCode2Concept.put(answerString, concept);
}
vs
.getCompose()
.getIncludeFirstRep()
.setSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
.addConcept()
.setCode(answerString)
.setDisplay(displayText);
}
} }
} }

View File

@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.util.JpaConstants;
import ca.uhn.fhir.jpa.util.LoggingRule; import ca.uhn.fhir.jpa.util.LoggingRule;
import ca.uhn.fhir.model.dstu2.resource.Bundle; import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.IRequestOperationCallback; import ca.uhn.fhir.rest.api.server.IRequestOperationCallback;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
@ -58,6 +59,10 @@ import static org.mockito.Mockito.when;
public abstract class BaseJpaTest { public abstract class BaseJpaTest {
static {
System.setProperty(Constants.TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS, "1000");
}
protected static final String CM_URL = "http://example.com/my_concept_map"; protected static final String CM_URL = "http://example.com/my_concept_map";
protected static final String CS_URL = "http://example.com/my_code_system"; protected static final String CS_URL = "http://example.com/my_code_system";
protected static final String CS_URL_2 = "http://example.com/my_code_system2"; protected static final String CS_URL_2 = "http://example.com/my_code_system2";

View File

@ -1,31 +1,77 @@
package ca.uhn.fhir.jpa.dao.dstu3; package ca.uhn.fhir.jpa.dao.dstu3;
import static org.hamcrest.Matchers.containsString; import ca.uhn.fhir.rest.api.EncodingEnum;
import static org.junit.Assert.assertThat; import ca.uhn.fhir.rest.api.MethodOutcome;
import static org.junit.Assert.fail; import ca.uhn.fhir.rest.api.ValidationModeEnum;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import java.io.IOException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import java.nio.charset.StandardCharsets; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent; import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.dstu3.model.Observation.ObservationStatus; import org.hl7.fhir.dstu3.model.Observation.ObservationStatus;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.*; import org.junit.AfterClass;
import org.junit.Ignore;
import org.junit.Test;
import ca.uhn.fhir.util.StopWatch; import java.io.IOException;
import ca.uhn.fhir.rest.api.*; import java.nio.charset.StandardCharsets;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.util.TestUtil; import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test { public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3ValidateTest.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3ValidateTest.class);
@AfterClass @Test
public static void afterClassClearContext() { public void testValidateChangedQuestionnaire() {
TestUtil.clearAllStaticFieldsForUnitTest(); Questionnaire q = new Questionnaire();
q.setId("QUEST");
q.addItem().setLinkId("A").setType(Questionnaire.QuestionnaireItemType.STRING).setRequired(true);
myQuestionnaireDao.update(q);
try {
QuestionnaireResponse qr = new QuestionnaireResponse();
qr.setStatus(QuestionnaireResponse.QuestionnaireResponseStatus.COMPLETED);
qr.getQuestionnaire().setReference("Questionnaire/QUEST");
qr.addItem().setLinkId("A").addAnswer().setValue(new StringType("AAA"));
MethodOutcome results = myQuestionnaireResponseDao.validate(qr, null, null, null, null, null, null);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(results.getOperationOutcome()));
} catch (PreconditionFailedException e) {
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()));
fail(e.toString());
}
q = new Questionnaire();
q.setId("QUEST");
q.addItem().setLinkId("B").setType(Questionnaire.QuestionnaireItemType.STRING).setRequired(true);
myQuestionnaireDao.update(q);
QuestionnaireResponse qr = new QuestionnaireResponse();
qr.setStatus(QuestionnaireResponse.QuestionnaireResponseStatus.COMPLETED);
qr.getQuestionnaire().setReference("Questionnaire/QUEST");
qr.addItem().setLinkId("A").addAnswer().setValue(new StringType("AAA"));
MethodOutcome results = myQuestionnaireResponseDao.validate(qr, null, null, null, null, null, null);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(results.getOperationOutcome()));
sleepAtLeast(2500);
try {
myQuestionnaireResponseDao.validate(qr, null, null, null, null, null, null);
fail();
} catch (PreconditionFailedException e) {
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()));
// good
}
} }
@Test @Test
@ -125,24 +171,24 @@ public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test {
MethodOutcome outcome = null; MethodOutcome outcome = null;
ValidationModeEnum mode = ValidationModeEnum.CREATE; ValidationModeEnum mode = ValidationModeEnum.CREATE;
switch (enc) { switch (enc) {
case JSON: case JSON:
encoded = myFhirCtx.newJsonParser().encodeResourceToString(input); encoded = myFhirCtx.newJsonParser().encodeResourceToString(input);
try { try {
myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd); myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd);
fail(); fail();
} catch (PreconditionFailedException e) { } catch (PreconditionFailedException e) {
return (OperationOutcome) e.getOperationOutcome(); return (OperationOutcome) e.getOperationOutcome();
} }
break; break;
case XML: case XML:
encoded = myFhirCtx.newXmlParser().encodeResourceToString(input); encoded = myFhirCtx.newXmlParser().encodeResourceToString(input);
try { try {
myObservationDao.validate(input, null, encoded, EncodingEnum.XML, mode, null, mySrd); myObservationDao.validate(input, null, encoded, EncodingEnum.XML, mode, null, mySrd);
fail(); fail();
} catch (PreconditionFailedException e) { } catch (PreconditionFailedException e) {
return (OperationOutcome) e.getOperationOutcome(); return (OperationOutcome) e.getOperationOutcome();
} }
break; break;
} }
throw new IllegalStateException(); // shouldn't get here throw new IllegalStateException(); // shouldn't get here
@ -290,11 +336,11 @@ public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test {
@Test @Test
@Ignore @Ignore
public void testValidateNewQuestionnaireFormat() throws Exception { public void testValidateNewQuestionnaireFormat() throws Exception {
String input =IOUtils.toString(FhirResourceDaoDstu3ValidateTest.class.getResourceAsStream("/questionnaire_dstu3.xml")); String input = IOUtils.toString(FhirResourceDaoDstu3ValidateTest.class.getResourceAsStream("/questionnaire_dstu3.xml"));
try { try {
MethodOutcome results = myQuestionnaireDao.validate(null, null, input, EncodingEnum.XML, ValidationModeEnum.UPDATE, null, mySrd); MethodOutcome results = myQuestionnaireDao.validate(null, null, input, EncodingEnum.XML, ValidationModeEnum.UPDATE, null, mySrd);
OperationOutcome oo = (OperationOutcome) results.getOperationOutcome(); OperationOutcome oo = (OperationOutcome) results.getOperationOutcome();
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo)); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(oo));
} catch (PreconditionFailedException e) { } catch (PreconditionFailedException e) {
// this is a failure of the test // this is a failure of the test
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome())); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()));
@ -302,4 +348,9 @@ public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test {
} }
} }
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
} }

View File

@ -561,6 +561,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
} }
@Test @Test
@Ignore
public void testExpandWithNoResultsInLocalValueSet1() { public void testExpandWithNoResultsInLocalValueSet1() {
createLocalCsAndVs(); createLocalCsAndVs();
@ -609,30 +610,54 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
public void testExpandWithSystemAndCodesAndFilterKeywordInLocalValueSet() { public void testExpandWithSystemAndCodesAndFilterKeywordInLocalValueSet() {
createLocalCsAndVs(); createLocalCsAndVs();
ValueSet vs = new ValueSet(); {
ConceptSetComponent include = vs.getCompose().addInclude(); ValueSet vs = new ValueSet();
include.setSystem(URL_MY_CODE_SYSTEM); ConceptSetComponent include = vs.getCompose().addInclude();
include.addConcept().setCode("A"); include.setSystem(URL_MY_CODE_SYSTEM);
include.addConcept().setCode("AAA");
include.addFilter().setProperty("display").setOp(FilterOperator.EQUAL).setValue("AAA"); include.addFilter().setProperty("display").setOp(FilterOperator.EQUAL).setValue("AAA");
ValueSet result = myValueSetDao.expand(vs, null); ValueSet result = myValueSetDao.expand(vs, null);
// Technically it's not valid to expand a ValueSet with both includes and filters so the // Technically it's not valid to expand a ValueSet with both includes and filters so the
// result fails validation because of the input.. we're being permissive by allowing both // result fails validation because of the input.. we're being permissive by allowing both
// though, so we won't validate the input // though, so we won't validate the input
result.setCompose(new ValueSetComposeComponent()); result.setCompose(new ValueSetComposeComponent());
logAndValidateValueSet(result); logAndValidateValueSet(result);
ArrayList<String> codes = toCodesContains(result.getExpansion().getContains()); ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("A", "AAA")); assertThat(codes, containsInAnyOrder("AAA"));
int idx = codes.indexOf("AAA"); int idx = codes.indexOf("AAA");
assertEquals("AAA", result.getExpansion().getContains().get(idx).getCode()); assertEquals("AAA", result.getExpansion().getContains().get(idx).getCode());
assertEquals("Code AAA", result.getExpansion().getContains().get(idx).getDisplay()); assertEquals("Code AAA", result.getExpansion().getContains().get(idx).getDisplay());
assertEquals(URL_MY_CODE_SYSTEM, result.getExpansion().getContains().get(idx).getSystem()); assertEquals(URL_MY_CODE_SYSTEM, result.getExpansion().getContains().get(idx).getSystem());
// }
// Now with a disjunction
{
ValueSet vs = new ValueSet();
ConceptSetComponent include = vs.getCompose().addInclude();
include.setSystem(URL_MY_CODE_SYSTEM);
include.addConcept().setCode("A");
include.addFilter().setProperty("display").setOp(FilterOperator.EQUAL).setValue("AAA");
ValueSet result = myValueSetDao.expand(vs, null);
// Technically it's not valid to expand a ValueSet with both includes and filters so the
// result fails validation because of the input.. we're being permissive by allowing both
// though, so we won't validate the input
result.setCompose(new ValueSetComposeComponent());
logAndValidateValueSet(result);
ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, empty());
}
} }
@Test @Test

View File

@ -32,6 +32,8 @@ import org.springframework.transaction.annotation.Transactional;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3TerminologyTest.URL_MY_CODE_SYSTEM; import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3TerminologyTest.URL_MY_CODE_SYSTEM;
import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3TerminologyTest.URL_MY_VALUE_SET; import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3TerminologyTest.URL_MY_VALUE_SET;
@ -96,6 +98,39 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
createLocalVs(codeSystem); createLocalVs(codeSystem);
} }
public void createLoincSystemWithSomeCodes() {
runInTransaction(() -> {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl(CS_URL);
codeSystem.setContent(CodeSystemContentMode.NOTPRESENT);
IIdType id = myCodeSystemDao.create(codeSystem, mySrd).getId().toUnqualified();
ResourceTable table = myResourceTableDao.findById(id.getIdPartAsLong()).orElseThrow(IllegalArgumentException::new);
TermCodeSystemVersion cs = new TermCodeSystemVersion();
cs.setResource(table);
TermConcept code;
code = new TermConcept(cs, "50015-7");
code.addPropertyString("SYSTEM", "Bld/Bone mar^Donor");
cs.getConcepts().add(code);
code = new TermConcept(cs, "43343-3");
code.addPropertyString("SYSTEM", "Ser");
code.addPropertyString("HELLO", "12345-1");
cs.getConcepts().add(code);
code = new TermConcept(cs, "43343-4");
code.addPropertyString("SYSTEM", "Ser");
code.addPropertyString("HELLO", "12345-2");
cs.getConcepts().add(code);
myTermSvc.storeNewCodeSystemVersion(table.getId(), CS_URL, "SYSTEM NAME", cs);
});
}
private void createLocalVs(CodeSystem codeSystem) { private void createLocalVs(CodeSystem codeSystem) {
myLocalVs = new ValueSet(); myLocalVs = new ValueSet();
myLocalVs.setUrl(URL_MY_VALUE_SET); myLocalVs.setUrl(URL_MY_VALUE_SET);
@ -132,6 +167,71 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
myLocalValueSetId = myValueSetDao.create(myLocalVs, mySrd).getId().toUnqualifiedVersionless(); myLocalValueSetId = myValueSetDao.create(myLocalVs, mySrd).getId().toUnqualifiedVersionless();
} }
@Test
public void testExpandValueSetPropertySearchWithRegexExcludeUsingOr() {
createLoincSystemWithSomeCodes();
List<String> codes;
ValueSet vs;
ValueSet outcome;
ValueSet.ConceptSetComponent exclude;
// Include
vs = new ValueSet();
vs.getCompose()
.addInclude()
.setSystem(CS_URL);
exclude = vs.getCompose().addExclude();
exclude.setSystem(CS_URL);
exclude
.addFilter()
.setProperty("HELLO")
.setOp(ValueSet.FilterOperator.REGEX)
.setValue("12345-1|12345-2");
IIdType vsId = ourClient.create().resource(vs).execute().getId();
outcome = (ValueSet) ourClient.operation().onInstance(vsId).named("expand").withNoParameters(Parameters.class).execute().getParameter().get(0).getResource();
codes = toCodesContains(outcome.getExpansion().getContains());
ourLog.info("** Got codes: {}", codes);
assertThat(codes, containsInAnyOrder("50015-7"));
assertEquals(1, outcome.getCompose().getInclude().size());
assertEquals(1, outcome.getCompose().getExclude().size());
assertEquals(1, outcome.getExpansion().getTotal());
}
@Test
public void testExpandValueSetPropertySearchWithRegexExcludeNoFilter() {
createLoincSystemWithSomeCodes();
List<String> codes;
ValueSet vs;
ValueSet outcome;
ValueSet.ConceptSetComponent exclude;
// Include
vs = new ValueSet();
vs.getCompose()
.addInclude()
.setSystem(CS_URL);
exclude = vs.getCompose().addExclude();
exclude.setSystem(CS_URL);
IIdType vsId = ourClient.create().resource(vs).execute().getId();
outcome = (ValueSet) ourClient.operation().onInstance(vsId).named("expand").withNoParameters(Parameters.class).execute().getParameter().get(0).getResource();
codes = toCodesContains(outcome.getExpansion().getContains());
assertThat(codes, empty());
}
@Test @Test
public void testExpandById() throws IOException { public void testExpandById() throws IOException {
//@formatter:off //@formatter:off
@ -611,4 +711,15 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
return codeSystem; return codeSystem;
} }
public static List<String> toCodesContains(List<ValueSet.ValueSetExpansionContainsComponent> theContains) {
List<String> retVal = new ArrayList<>();
for (ValueSet.ValueSetExpansionContainsComponent next : theContains) {
retVal.add(next.getCode());
}
return retVal;
}
} }

View File

@ -156,6 +156,10 @@ public class TerminologyLoaderSvcLoincTest {
assertEquals("LA6270-8", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode()); assertEquals("LA6270-8", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode());
assertEquals("Never", vs.getCompose().getInclude().get(0).getConcept().get(0).getDisplay()); assertEquals("Never", vs.getCompose().getInclude().get(0).getConcept().get(0).getDisplay());
// External AnswerList
vs = valueSets.get("LL1892-0");
assertEquals(0, vs.getCompose().getIncludeFirstRep().getConcept().size());
// Part // Part
code = concepts.get("LP101394-7"); code = concepts.get("LP101394-7");
assertEquals("LP101394-7", code.getCode()); assertEquals("LP101394-7", code.getCode());

View File

@ -129,6 +129,12 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {
code = new TermConcept(cs, "43343-3"); code = new TermConcept(cs, "43343-3");
code.addPropertyString("SYSTEM", "Ser"); code.addPropertyString("SYSTEM", "Ser");
code.addPropertyString("HELLO", "12345-1");
cs.getConcepts().add(code);
code = new TermConcept(cs, "43343-4");
code.addPropertyString("SYSTEM", "Ser");
code.addPropertyString("HELLO", "12345-2");
cs.getConcepts().add(code); cs.getConcepts().add(code);
myTermSvc.storeNewCodeSystemVersion(table.getId(), CS_URL, "SYSTEM NAME", cs); myTermSvc.storeNewCodeSystemVersion(table.getId(), CS_URL, "SYSTEM NAME", cs);
@ -286,6 +292,33 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {
assertThat(codes, containsInAnyOrder("43343-3")); assertThat(codes, containsInAnyOrder("43343-3"));
} }
@Test
public void testExpandValueSetPropertySearchWithRegexExcludeUsingOr() {
createLoincSystemWithSomeCodes();
List<String> codes;
ValueSet vs;
ValueSet outcome;
ValueSet.ConceptSetComponent exclude;
// Include
vs = new ValueSet();
vs.getCompose()
.addInclude()
.setSystem(CS_URL);
exclude = vs.getCompose().addExclude();
exclude.setSystem(CS_URL);
exclude
.addFilter()
.setProperty("HELLO")
.setOp(ValueSet.FilterOperator.REGEX)
.setValue("12345-1|12345-2");
outcome = myTermSvc.expandValueSet(vs);
codes = toCodesContains(outcome.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("50015-7"));
}
@Test @Test
public void testExpandValueSetPropertySearchWithRegexInclude() { public void testExpandValueSetPropertySearchWithRegexInclude() {
// create codes with "SYSTEM" property "Bld/Bone mar^Donor" and "Ser" // create codes with "SYSTEM" property "Bld/Bone mar^Donor" and "Ser"
@ -558,7 +591,7 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test {
} }
} }
private List<String> toCodesContains(List<ValueSet.ValueSetExpansionContainsComponent> theContains) { public static List<String> toCodesContains(List<ValueSet.ValueSetExpansionContainsComponent> theContains) {
List<String> retVal = new ArrayList<>(); List<String> retVal = new ArrayList<>();
for (ValueSet.ValueSetExpansionContainsComponent next : theContains) { for (ValueSet.ValueSetExpansionContainsComponent next : theContains) {

View File

@ -1,11 +1,12 @@
"AnswerListId","AnswerListName" ,"AnswerListOID" ,"ExtDefinedYN","ExtDefinedAnswerListCodeSystem","ExtDefinedAnswerListLink","AnswerStringId","LocalAnswerCode","LocalAnswerCodeSystem","SequenceNumber","DisplayText" ,"ExtCodeId","ExtCodeDisplayName" ,"ExtCodeSystem" ,"ExtCodeSystemVersion" ,"ExtCodeSystemCopyrightNotice" ,"SubsequentTextPrompt","Description","Score" "AnswerListId","AnswerListName" ,"AnswerListOID" ,"ExtDefinedYN","ExtDefinedAnswerListCodeSystem","ExtDefinedAnswerListLink" ,"AnswerStringId","LocalAnswerCode","LocalAnswerCodeSystem","SequenceNumber","DisplayText" ,"ExtCodeId","ExtCodeDisplayName" ,"ExtCodeSystem" ,"ExtCodeSystemVersion" ,"ExtCodeSystemCopyrightNotice" ,"SubsequentTextPrompt","Description","Score"
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165","N" , , ,"LA13825-7" ,"1" , ,1 ,"1 slice or 1 dinner roll" , , , , , , , , "LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165" ,"N" , , ,"LA13825-7" ,"1" , ,1 ,"1 slice or 1 dinner roll" , , , , , , , ,
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165","N" , , ,"LA13838-0" ,"2" , ,2 ,"2 slices or 2 dinner rolls" , , , , , , , , "LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165" ,"N" , , ,"LA13838-0" ,"2" , ,2 ,"2 slices or 2 dinner rolls" , , , , , , , ,
"LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165","N" , , ,"LA13892-7" ,"3" , ,3 ,"More than 2 slices or 2 dinner rolls", , , , , , , , "LL1000-0" ,"PhenX05_13_30D bread amt","1.3.6.1.4.1.12009.10.1.165" ,"N" , , ,"LA13892-7" ,"3" , ,3 ,"More than 2 slices or 2 dinner rolls", , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA6270-8" ,"00" , ,1 ,"Never" , , , , , , , , "LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA6270-8" ,"00" , ,1 ,"Never" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13836-4" ,"01" , ,2 ,"1-3 times per month" , , , , , , , , "LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13836-4" ,"01" , ,2 ,"1-3 times per month" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13834-9" ,"02" , ,3 ,"1-2 times per week" , , , , , , , , "LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13834-9" ,"02" , ,3 ,"1-2 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13853-9" ,"03" , ,4 ,"3-4 times per week" , , , , , , , , "LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13853-9" ,"03" , ,4 ,"3-4 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13860-4" ,"04" , ,5 ,"5-6 times per week" , , , , , , , , "LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13860-4" ,"04" , ,5 ,"5-6 times per week" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA13827-3" ,"05" , ,6 ,"1 time per day" , , , , , , , , "LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA13827-3" ,"05" , ,6 ,"1 time per day" , , , , , , , ,
"LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166","N" , , ,"LA4389-8" ,"97" , ,11 ,"Refused" ,"443390004","Refused (qualifier value)","http://snomed.info/sct","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College", , , "LL1001-8" ,"PhenX05_14_30D freq amts","1.3.6.1.4.1.12009.10.1.166" ,"N" , , ,"LA4389-8" ,"97" , ,11 ,"Refused" ,"443390004","Refused (qualifier value)","http://snomed.info/sct","http://snomed.info/sct/900000000000207008/version/20170731","This material includes SNOMED Clinical Terms® (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights reserved. SNOMED CT® was originally created by The College", , ,
"LL1892-0" ,"ICD-9_ICD-10" ,"1.3.6.1.4.1.12009.10.1.1069","Y" , ,"http://www.cdc.gov/nchs/icd.htm", , , , , , , , , , , , ,

Can't render this file because it contains an unexpected character in line 1 and column 31.

View File

@ -262,6 +262,10 @@ public interface IServerInterceptor {
* This method is called after all processing is completed for a request, but only if the * This method is called after all processing is completed for a request, but only if the
* request completes normally (i.e. no exception is thrown). * request completes normally (i.e. no exception is thrown).
* <p> * <p>
* This method should not throw any exceptions. Any exception that is thrown by this
* method will be logged, but otherwise not acted upon.
* </p>
* <p>
* Note that this individual interceptors will have this method called in the reverse order from the order in * Note that this individual interceptors will have this method called in the reverse order from the order in
* which the interceptors were registered with the server. * which the interceptors were registered with the server.
* </p> * </p>

View File

@ -168,6 +168,14 @@
</dependency> </dependency>
--> -->
<!-- Used by the validator -->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
<optional>true</optional>
</dependency>
<!-- Testing --> <!-- Testing -->
<dependency> <dependency>
<groupId>org.xmlunit</groupId> <groupId>org.xmlunit</groupId>

View File

@ -1,10 +1,14 @@
package org.hl7.fhir.dstu3.hapi.ctx; package org.hl7.fhir.dstu3.hapi.ctx;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.CoverageIgnore; import ca.uhn.fhir.util.CoverageIgnore;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu3.context.IWorkerContext; import org.hl7.fhir.dstu3.context.IWorkerContext;
import org.hl7.fhir.dstu3.formats.IParser; import org.hl7.fhir.dstu3.formats.IParser;
import org.hl7.fhir.dstu3.formats.ParserType; import org.hl7.fhir.dstu3.formats.ParserType;
@ -22,13 +26,15 @@ import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity; import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
import java.util.*; import java.util.*;
import java.util.concurrent.TimeUnit;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander, ValueSetExpanderFactory { public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander, ValueSetExpanderFactory {
private final FhirContext myCtx; private final FhirContext myCtx;
private Map<String, Resource> myFetchedResourceCache = new HashMap<String, Resource>(); private final Cache<String, Resource> myFetchedResourceCache;
private IValidationSupport myValidationSupport; private IValidationSupport myValidationSupport;
private ExpansionProfile myExpansionProfile; private ExpansionProfile myExpansionProfile;
@ -37,6 +43,12 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
Validate.notNull(theValidationSupport, "theValidationSupport must not be null"); Validate.notNull(theValidationSupport, "theValidationSupport must not be null");
myCtx = theCtx; myCtx = theCtx;
myValidationSupport = theValidationSupport; myValidationSupport = theValidationSupport;
long timeoutMillis = 10 * DateUtils.MILLIS_PER_SECOND;
if (System.getProperties().containsKey(Constants.TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS)) {
timeoutMillis = Long.parseLong(System.getProperty(Constants.TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS));
}
myFetchedResourceCache = Caffeine.newBuilder().expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS).build();
} }
@Override @Override
@ -92,13 +104,9 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
return null; return null;
} else { } else {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
T retVal = (T) myFetchedResourceCache.get(theUri); T retVal = (T) myFetchedResourceCache.get(theUri, t->{
if (retVal == null) { return myValidationSupport.fetchResource(myCtx, theClass, theUri);
retVal = myValidationSupport.fetchResource(myCtx, theClass, theUri); });
if (retVal != null) {
myFetchedResourceCache.put(theUri, retVal);
}
}
return retVal; return retVal;
} }
} }

View File

@ -156,6 +156,28 @@ public class InterceptorDstu3Test {
assertNotNull(arTypeCapt.getValue().getResource()); assertNotNull(arTypeCapt.getValue().getResource());
} }
@Test
public void testExceptionInProcessingCompletedNormally() throws Exception {
ourServlet.setInterceptors(myInterceptor1);
when(myInterceptor1.incomingRequestPreProcessed(nullable(HttpServletRequest.class), nullable(HttpServletResponse.class))).thenReturn(true);
when(myInterceptor1.incomingRequestPostProcessed(nullable(ServletRequestDetails.class), nullable(HttpServletRequest.class), nullable(HttpServletResponse.class))).thenReturn(true);
when(myInterceptor1.outgoingResponse(nullable(ServletRequestDetails.class), nullable(OperationOutcome.class))).thenReturn(true);
when(myInterceptor1.outgoingResponse(nullable(ServletRequestDetails.class), nullable(ResponseDetails.class), nullable(HttpServletRequest.class), nullable(HttpServletResponse.class))).thenReturn(true);
doThrow(new NullPointerException("FOO")).when(myInterceptor1).processingCompletedNormally(any());
String input = createInput();
HttpPost httpPost = new HttpPost("http://localhost:" + ourPort + "/Patient");
httpPost.setEntity(new StringEntity(input, ContentType.create(Constants.CT_FHIR_JSON, "UTF-8")));
HttpResponse status = ourClient.execute(httpPost);
try {
assertEquals(201, status.getStatusLine().getStatusCode());
} finally {
IOUtils.closeQuietly(status.getEntity().getContent());
}
}
@Test @Test
public void testResponseWithNothing() throws Exception { public void testResponseWithNothing() throws Exception {
ourServlet.setInterceptors(myInterceptor1); ourServlet.setInterceptors(myInterceptor1);
@ -229,34 +251,6 @@ public class InterceptorDstu3Test {
i.resourceUpdated(null, null, null); i.resourceUpdated(null, null, null);
} }
@AfterClass
public static void afterClassClearContext() throws Exception {
ourServer.stop();
TestUtil.clearAllStaticFieldsForUnitTest();
}
@BeforeClass
public static void beforeClass() throws Exception {
ourPort = PortUtil.findFreePort();
ourServer = new Server(ourPort);
DummyPatientResourceProvider patientProvider = new DummyPatientResourceProvider();
ServletHandler proxyHandler = new ServletHandler();
ourServlet = new RestfulServer(ourCtx);
ourServlet.setResourceProviders(patientProvider);
ServletHolder servletHolder = new ServletHolder(ourServlet);
proxyHandler.addServletWithMapping(servletHolder, "/*");
ourServer.setHandler(proxyHandler);
ourServer.start();
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
HttpClientBuilder builder = HttpClientBuilder.create();
builder.setConnectionManager(connectionManager);
ourClient = builder.build();
}
public static class DummyPatientResourceProvider implements IResourceProvider { public static class DummyPatientResourceProvider implements IResourceProvider {
@Create() @Create()
@ -285,4 +279,32 @@ public class InterceptorDstu3Test {
} }
@AfterClass
public static void afterClassClearContext() throws Exception {
ourServer.stop();
TestUtil.clearAllStaticFieldsForUnitTest();
}
@BeforeClass
public static void beforeClass() throws Exception {
ourPort = PortUtil.findFreePort();
ourServer = new Server(ourPort);
DummyPatientResourceProvider patientProvider = new DummyPatientResourceProvider();
ServletHandler proxyHandler = new ServletHandler();
ourServlet = new RestfulServer(ourCtx);
ourServlet.setResourceProviders(patientProvider);
ServletHolder servletHolder = new ServletHolder(ourServlet);
proxyHandler.addServletWithMapping(servletHolder, "/*");
ourServer.setHandler(proxyHandler);
ourServer.start();
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
HttpClientBuilder builder = HttpClientBuilder.create();
builder.setConnectionManager(connectionManager);
ourClient = builder.build();
}
} }

View File

@ -50,6 +50,13 @@
<optional>true</optional> <optional>true</optional>
</dependency> </dependency>
<!-- Used by the validator -->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
<optional>true</optional>
</dependency>
<!-- <!--
Test dependencies on other optional parts of HAPI Test dependencies on other optional parts of HAPI
--> -->

View File

@ -1,11 +1,14 @@
package org.hl7.fhir.r4.hapi.ctx; package org.hl7.fhir.r4.hapi.ctx;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.CoverageIgnore; import ca.uhn.fhir.util.CoverageIgnore;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.fhir.ucum.UcumService; import org.fhir.ucum.UcumService;
import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.exceptions.TerminologyServiceException; import org.hl7.fhir.exceptions.TerminologyServiceException;
@ -28,13 +31,14 @@ import org.hl7.fhir.utilities.TranslationServices;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity; import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
import java.util.*; import java.util.*;
import java.util.concurrent.TimeUnit;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander, ValueSetExpanderFactory { public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander, ValueSetExpanderFactory {
private final FhirContext myCtx; private final FhirContext myCtx;
private Map<String, Resource> myFetchedResourceCache = new HashMap<String, Resource>(); private final Cache<String, Resource> myFetchedResourceCache;
private IValidationSupport myValidationSupport; private IValidationSupport myValidationSupport;
private ExpansionProfile myExpansionProfile; private ExpansionProfile myExpansionProfile;
@ -43,6 +47,13 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
Validate.notNull(theValidationSupport, "theValidationSupport must not be null"); Validate.notNull(theValidationSupport, "theValidationSupport must not be null");
myCtx = theCtx; myCtx = theCtx;
myValidationSupport = theValidationSupport; myValidationSupport = theValidationSupport;
long timeoutMillis = 10 * DateUtils.MILLIS_PER_SECOND;
if (System.getProperties().containsKey(ca.uhn.fhir.rest.api.Constants.TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS)) {
timeoutMillis = Long.parseLong(System.getProperty(Constants.TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS));
}
myFetchedResourceCache = Caffeine.newBuilder().expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS).build();
} }
@Override @Override
@ -206,9 +217,9 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
ValueSetExpansionOutcome expandedValueSet = null; ValueSetExpansionOutcome expandedValueSet = null;
/* /*
* The following valueset is a special case, since the BCP codesystem is very difficult to expand * The following valueset is a special case, since the BCP codesystem is very difficult to expand
*/ */
if (theVs != null && "http://hl7.org/fhir/ValueSet/languages".equals(theVs.getId())) { if (theVs != null && "http://hl7.org/fhir/ValueSet/languages".equals(theVs.getId())) {
ValueSet expansion = new ValueSet(); ValueSet expansion = new ValueSet();
for (ConceptSetComponent nextInclude : theVs.getCompose().getInclude()) { for (ConceptSetComponent nextInclude : theVs.getCompose().getInclude()) {
@ -338,13 +349,9 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
return null; return null;
} else { } else {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
T retVal = (T) myFetchedResourceCache.get(theUri); T retVal = (T) myFetchedResourceCache.get(theUri, t -> {
if (retVal == null) { return myValidationSupport.fetchResource(myCtx, theClass, theUri);
retVal = myValidationSupport.fetchResource(myCtx, theClass, theUri); });
if (retVal != null) {
myFetchedResourceCache.put(theUri, (Resource) retVal);
}
}
return retVal; return retVal;
} }
} }

View File

@ -36,6 +36,20 @@ public class JsonParserR4Test {
return b; return b;
} }
@Test
public void testDontStripVersions() {
FhirContext ctx = FhirContext.forR4();
ctx.getParserOptions().setDontStripVersionsFromReferencesAtPaths("QuestionnaireResponse.questionnaire");
QuestionnaireResponse qr = new QuestionnaireResponse();
qr.getQuestionnaireElement().setValueAsString("Questionnaire/123/_history/456");
String output = ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(qr);
ourLog.info(output);
assertThat(output, containsString("\"Questionnaire/123/_history/456\""));
}
/** /**
* See #814 * See #814
*/ */

View File

@ -2,6 +2,7 @@ package org.hl7.fhir.dstu3.hapi.validation;
import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.validation.IValidationContext; import ca.uhn.fhir.validation.IValidationContext;
@ -15,6 +16,7 @@ import com.google.gson.JsonObject;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.time.DateUtils;
import org.fhir.ucum.UcumService; import org.fhir.ucum.UcumService;
import org.hl7.fhir.convertors.VersionConvertor_30_40; import org.hl7.fhir.convertors.VersionConvertor_30_40;
import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext; import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
@ -272,14 +274,24 @@ public class FhirInstanceValidator extends BaseValidatorBridge implements IValid
private final HapiWorkerContext myWrap; private final HapiWorkerContext myWrap;
private final VersionConvertor_30_40 myConverter; private final VersionConvertor_30_40 myConverter;
private volatile List<org.hl7.fhir.r4.model.StructureDefinition> myAllStructures; private volatile List<org.hl7.fhir.r4.model.StructureDefinition> myAllStructures;
private LoadingCache<ResourceKey, org.hl7.fhir.r4.model.Resource> myFetchResourceCache private LoadingCache<ResourceKey, org.hl7.fhir.r4.model.Resource> myFetchResourceCache;
= Caffeine.newBuilder()
.expireAfterWrite(10, TimeUnit.SECONDS) public WorkerContextWrapper(HapiWorkerContext theWorkerContext) {
myWrap = theWorkerContext;
myConverter = new VersionConvertor_30_40();
long timeoutMillis = 10 * DateUtils.MILLIS_PER_SECOND;
if (System.getProperties().containsKey(ca.uhn.fhir.rest.api.Constants.TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS)) {
timeoutMillis = Long.parseLong(System.getProperty(Constants.TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS));
}
myFetchResourceCache = Caffeine.newBuilder()
.expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS)
.maximumSize(10000) .maximumSize(10000)
.build(new CacheLoader<ResourceKey, org.hl7.fhir.r4.model.Resource>() { .build(new CacheLoader<ResourceKey, org.hl7.fhir.r4.model.Resource>() {
@Override @Override
public org.hl7.fhir.r4.model.Resource load(FhirInstanceValidator.ResourceKey key) throws Exception { public org.hl7.fhir.r4.model.Resource load(ResourceKey key) throws Exception {
org.hl7.fhir.dstu3.model.Resource fetched; Resource fetched;
switch (key.getResourceName()) { switch (key.getResourceName()) {
case "StructureDefinition": case "StructureDefinition":
fetched = myWrap.fetchResource(StructureDefinition.class, key.getUri()); fetched = myWrap.fetchResource(StructureDefinition.class, key.getUri());
@ -308,10 +320,6 @@ public class FhirInstanceValidator extends BaseValidatorBridge implements IValid
} }
} }
}); });
public WorkerContextWrapper(HapiWorkerContext theWorkerContext) {
myWrap = theWorkerContext;
myConverter = new VersionConvertor_30_40();
} }
@Override @Override

View File

@ -465,12 +465,13 @@
<name>Ana Maria Radu</name> <name>Ana Maria Radu</name>
<organization>Cerner Corporation</organization> <organization>Cerner Corporation</organization>
</developer> </developer>
<developer>
<id>jbalbien</id>
</developer>
<developer> <developer>
<id>alinleonard</id> <id>alinleonard</id>
<name>Alin Leonard</name> <name>Alin Leonard</name>
<organization>Cerner Corporation</organization>
</developer>
<developer>
<id>jbalbien</id>
</developer> </developer>
</developers> </developers>

View File

@ -289,6 +289,19 @@
was never depended on by anyone. Please post on the HAPI FHIR mailing list if this was never depended on by anyone. Please post on the HAPI FHIR mailing list if this
change affects you. change affects you.
</action> </action>
<action type="fix">
A crash was fixed when using the ConceptMap/$translate operation to translate a mapping
where the equivalence was not specified.
</action>
<action type="add">
The maximum length for codes in the JPA server terminology service have been increased
to 500 in order to better accommodate code systems with very long codes. to 500 in order to better accommodate code systems with very long codes.
</action>
<action type="fix">
A bug in the DSTU3 validator was fixed where validation resources such as StructureDefinitions
and Questionnaires were cached in a cache that never expired, leading to validations against
stale versions of resources.
</action>
</release> </release>
<release version="3.4.0" date="2018-05-28"> <release version="3.4.0" date="2018-05-28">
<action type="add"> <action type="add">