Performance improvements
parent 9a880361b6
commit e5cb609f4d
@@ -502,6 +502,7 @@
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
</dependency>

<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava-testlib</artifactId>
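
The new Caffeine dependency above backs the CachingValidationSupport classes added later in this commit. As a rough illustration of the pattern only (this sketch is not part of the commit; the class and method names are made up), a time-expiring Caffeine cache is built once and consulted on every lookup:

// Illustrative sketch, assuming a 60-second expiry like the wrappers later in this diff.
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import java.util.concurrent.TimeUnit;

class ExpiringLookupCache {
	private final Cache<String, Object> myCache = Caffeine.newBuilder()
		.expireAfterWrite(60, TimeUnit.SECONDS) // entries silently drop out after a minute
		.build();

	Object fetch(String theKey) {
		// get(key, loader) computes and stores the value only on a cache miss
		return myCache.get(theKey, key -> expensiveLookup(key));
	}

	private Object expensiveLookup(String theKey) {
		return theKey; // placeholder for a costly call such as a validation-support fetch
	}
}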
@@ -8,10 +8,10 @@ import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.instance.utils.IResourceValidator.BestPracticeWarningLevel;
import org.hl7.fhir.r4.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.context.annotation.Bean;
@@ -29,9 +29,9 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -81,7 +81,7 @@ public class BaseDstu2Config extends BaseConfig {
public IValidatorModule instanceValidatorDstu2() {
FhirInstanceValidator retVal = new FhirInstanceValidator();
retVal.setBestPracticeWarningLevel(IResourceValidator.BestPracticeWarningLevel.Warning);
retVal.setValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2()));
retVal.setValidationSupport(new CachingValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2())));
return retVal;
}

@@ -91,6 +91,13 @@ public class BaseDstu2Config extends BaseConfig {
return retVal;
}

@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
return retVal;
}

@Bean(autowire = Autowire.BY_TYPE)
public IFulltextSearchSvc searchDao() {
FulltextSearchSvcImpl searchDao = new FulltextSearchSvcImpl();
@@ -121,13 +128,6 @@ public class BaseDstu2Config extends BaseConfig {
return retVal;
}

@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
return retVal;
}

@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcDstu2();
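
The myResourceCountsCache bean above wraps the expensive resource-count query in a cache that is refreshed at most once per minute. A minimal sketch of that kind of supplier-backed, millisecond-TTL cache (a hypothetical class, not HAPI's actual ResourceCountCache implementation):

// Hypothetical sketch only: mirrors how the bean above is configured.
import java.util.function.Supplier;

class TimedValueCache<T> {
	private final Supplier<T> mySupplier;
	private long myCacheMillis;
	private T myValue;
	private long myLastFetched;

	TimedValueCache(Supplier<T> theSupplier) {
		mySupplier = theSupplier;
	}

	void setCacheMillis(long theCacheMillis) {
		myCacheMillis = theCacheMillis;
	}

	synchronized T get() {
		long now = System.currentTimeMillis();
		if (myValue == null || now - myLastFetched > myCacheMillis) {
			myValue = mySupplier.get(); // e.g. the expensive getResourceCounts() query
			myLastFetched = now;
		}
		return myValue;
	}
}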
@@ -19,6 +19,7 @@ import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
@@ -37,9 +38,9 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -78,13 +79,17 @@ public class BaseDstu3Config extends BaseConfig {
return val;
}

@Bean
public JpaValidationSupportChainDstu3 jpaValidationSupportChain() {
return new JpaValidationSupportChainDstu3();
}

@Bean(name = "myJpaValidationSupportDstu3", autowire = Autowire.BY_NAME)
public ca.uhn.fhir.jpa.dao.dstu3.IJpaValidationSupportDstu3 jpaValidationSupportDstu3() {
ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
return retVal;
}

@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts());
@@ -142,7 +147,7 @@ public class BaseDstu3Config extends BaseConfig {
@Primary
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainDstu3")
public IValidationSupport validationSupportChainDstu3() {
return new JpaValidationSupportChainDstu3();
return new CachingValidationSupport(jpaValidationSupportChain());
}

}
@@ -21,6 +21,7 @@ import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.hapi.rest.server.GraphQLProvider;
import org.hl7.fhir.r4.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.utils.GraphQLEngine;
import org.hl7.fhir.r4.utils.IResourceValidator.BestPracticeWarningLevel;
@@ -40,9 +41,9 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -93,6 +94,11 @@ public class BaseR4Config extends BaseConfig {
return val;
}

@Bean
public JpaValidationSupportChainR4 jpaValidationSupportChain() {
return new JpaValidationSupportChainR4();
}

@Bean(name = "myJpaValidationSupportR4", autowire = Autowire.BY_NAME)
public ca.uhn.fhir.jpa.dao.r4.IJpaValidationSupportR4 jpaValidationSupportR4() {
ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4 retVal = new ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4();
@@ -156,7 +162,7 @@ public class BaseR4Config extends BaseConfig {
@Primary
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainR4")
public IValidationSupport validationSupportChainR4() {
return new JpaValidationSupportChainR4();
return new CachingValidationSupport(jpaValidationSupportChain());
}

}
@@ -305,7 +305,8 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase

final IBaseResource resource = toResource(resourceTable, false);

@SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resource.getClass());
Class<? extends IBaseResource> resourceClass = getContext().getResourceDefinition(resourceTable.getResourceType()).getImplementingClass();
@SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resourceClass);
dao.reindex(resource, resourceTable);
return null;
@@ -28,6 +28,7 @@ import java.util.*;
import javax.annotation.PostConstruct;

import org.apache.commons.codec.binary.StringUtils;
import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -62,7 +63,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
@Qualifier("myFhirContextDstu2Hl7Org")
private FhirContext myRiCtx;

private ValidationSupportChain myValidationSupport;
private CachingValidationSupport myValidationSupport;

private void addCompose(String theFilter, ValueSet theValueSetToPopulate, ValueSet theSourceValueSet, CodeSystemConcept theConcept) {
if (isBlank(theFilter)) {
@@ -252,7 +253,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
public void postConstruct() {
super.postConstruct();
myDefaultProfileValidationSupport = new DefaultProfileValidationSupport();
myValidationSupport = new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport);
myValidationSupport = new CachingValidationSupport(new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport));
}

@Override
@@ -26,12 +26,13 @@ import static org.apache.commons.lang3.StringUtils.trim;
import java.math.BigDecimal;
import java.util.*;

import javax.annotation.PostConstruct;
import javax.measure.unit.NonSI;
import javax.measure.unit.Unit;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.dstu3.context.IWorkerContext;
import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.CapabilityStatement.CapabilityStatementRestSecurityComponent;
@@ -58,6 +59,13 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
@Autowired
private org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport myValidationSupport;

private HapiWorkerContext myWorkerContext;

@PostConstruct
public void start() {
myWorkerContext = new HapiWorkerContext(getContext(), myValidationSupport);
}

/**
* Constructor
*/
@@ -695,8 +703,7 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
*/
@Override
protected List<Object> extractValues(String thePaths, IBaseResource theResource) {
IWorkerContext worker = new org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext(getContext(), myValidationSupport);
FHIRPathEngine fp = new FHIRPathEngine(worker);
FHIRPathEngine fp = new FHIRPathEngine(myWorkerContext);

List<Object> values = new ArrayList<>();
try {
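
The SearchParamExtractorDstu3 hunks above stop building a new HapiWorkerContext on every extractValues() call and instead create it once in a @PostConstruct method. A minimal sketch of that hoisting pattern (class and field names are illustrative, not from the commit):

// Illustrative sketch: construct the costly helper once after dependency injection,
// then reuse it on the hot path instead of rebuilding it per call.
class CachedHelperExample {
	private StringBuilder myHelper; // stands in for an expensive object such as HapiWorkerContext

	@javax.annotation.PostConstruct
	public void start() {
		myHelper = new StringBuilder("expensive to build"); // built exactly once by the container
	}

	public int extractSomething() {
		return myHelper.length(); // the per-call path no longer pays the construction cost
	}
}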
@@ -38,7 +38,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport.CodeValidationResult;
import org.hl7.fhir.r4.hapi.ctx.ValidationSupportChain;
import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
@@ -47,7 +47,6 @@ import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.IdType;
import org.springframework.beans.factory.annotation.Autowired;

import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@@ -98,10 +98,18 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
b.append(getResource(), obj.getResource());
b.append(getLatitude(), obj.getLatitude());
b.append(getLongitude(), obj.getLongitude());
b.append(myHashIdentity, obj.myHashIdentity);
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}

public Long getHashIdentity() {
return myHashIdentity;
}

public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}

@Override
protected Long getId() {
return myId;
@@ -133,10 +141,6 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
return b.toHashCode();
}

public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}

@Override
public IQueryParameterType toQueryParameterType() {
return null;
@@ -114,10 +114,18 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
b.append(getResource(), obj.getResource());
b.append(getTimeFromDate(getValueHigh()), getTimeFromDate(obj.getValueHigh()));
b.append(getTimeFromDate(getValueLow()), getTimeFromDate(obj.getValueLow()));
b.append(myHashIdentity, obj.myHashIdentity);
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}

public Long getHashIdentity() {
return myHashIdentity;
}

public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}

@Override
protected Long getId() {
return myId;
@@ -156,10 +164,6 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
return b.toHashCode();
}

public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}

@Override
public IQueryParameterType toQueryParameterType() {
DateTimeType value = new DateTimeType(myOriginalValue);
@@ -99,10 +99,18 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
b.append(getResource(), obj.getResource());
b.append(getValue(), obj.getValue());
b.append(isMissing(), obj.isMissing());
b.append(myHashIdentity, obj.myHashIdentity);
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}

public Long getHashIdentity() {
return myHashIdentity;
}

public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}

@Override
protected Long getId() {
return myId;
@@ -126,10 +134,6 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
return b.toHashCode();
}

public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}

@Override
public IQueryParameterType toQueryParameterType() {
return new NumberParam(myValue.toPlainString());
@@ -119,8 +119,8 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
b.append(getSystem(), obj.getSystem());
b.append(getUnits(), obj.getUnits());
b.append(getValue(), obj.getValue());
b.append(myHashIdentity, obj.myHashIdentity);
b.append(myHashIdentityAndUnits, obj.myHashIdentityAndUnits);
b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getHashIdentityAndUnits(), obj.getHashIdentityAndUnits());
return b.isEquals();
}
@@ -152,7 +152,7 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP

@PrePersist
public void calculateHashes() {
if (myHashNormalizedPrefix == null) {
if (myHashNormalizedPrefix == null && myDaoConfig != null) {
String resourceType = getResourceType();
String paramName = getParamName();
String valueNormalized = getValueNormalized();
@@ -184,8 +184,8 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
b.append(getParamName(), obj.getParamName());
b.append(getResource(), obj.getResource());
b.append(getValueExact(), obj.getValueExact());
b.append(myHashExact, obj.myHashExact);
b.append(myHashNormalizedPrefix, obj.myHashNormalizedPrefix);
b.append(getHashExact(), obj.getHashExact());
b.append(getHashNormalizedPrefix(), obj.getHashNormalizedPrefix());
return b.isEquals();
}
@@ -145,10 +145,10 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
b.append(getResource(), obj.getResource());
b.append(getSystem(), obj.getSystem());
b.append(getValue(), obj.getValue());
b.append(myHashIdentity, obj.myHashIdentity);
b.append(myHashSystem, obj.myHashSystem);
b.append(myHashSystemAndValue, obj.myHashSystemAndValue);
b.append(myHashValue, obj.myHashValue);
b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getHashSystem(), obj.getHashSystem());
b.append(getHashSystemAndValue(), obj.getHashSystemAndValue());
b.append(getHashValue(), obj.getHashValue());
return b.isEquals();
}
@@ -103,7 +103,6 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
b.append(getResource(), obj.getResource());
b.append(getUri(), obj.getUri());
b.append(getHashUri(), obj.getHashUri());
b.append(myHashUri, obj.myHashUri);
return b.isEquals();
}
@@ -11,6 +11,7 @@ import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship;
import ca.uhn.fhir.jpa.util.Counter;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
@@ -123,12 +124,20 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {

}

private void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode) {
private void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) {

boolean foundMatch = false;
for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) {
String nextFilename = nextZipBytes.getFilename();
if (nextFilename.endsWith("/" + theFileNamePart)) {
boolean matches;
if (theIsPartialFilename) {
matches = nextFilename.contains(theFileNamePart);
} else {
matches = nextFilename.endsWith("/" + theFileNamePart) || nextFilename.equals(theFileNamePart);
}
if (matches) {
ourLog.info("Processing file {}", nextFilename);
foundMatch = true;

Reader reader;
CSVParser parsed;
@@ -168,6 +177,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {

}

if (!foundMatch) {
throw new InvalidRequestException("Did not find file matching " + theFileNamePart);
}

}

@Override
@@ -256,72 +269,72 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {

// Part file
handler = new LoincPartHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theDescriptors, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
Map<PartTypeAndPartName, String> partTypeAndPartNameToPartNumber = ((LoincPartHandler) handler).getPartTypeAndPartNameToPartNumber();

// Loinc Codes
handler = new LoincHandler(codeSystemVersion, code2concept, propertyNamesToTypes, partTypeAndPartNameToPartNumber);
iterateOverZipFile(theDescriptors, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Loinc Hierarchy
handler = new LoincHierarchyHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theDescriptors, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Answer lists (ValueSets of potential answers/values for loinc "questions")
handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Answer list links (connects loinc observation codes to answerlist codes)
handler = new LoincAnswerListLinkHandler(code2concept, valueSets);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Part link file
handler = new LoincPartLinkHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theDescriptors, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Part related code mapping
handler = new LoincPartRelatedCodeMappingHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Document Ontology File
handler = new LoincDocumentOntologyHandler(code2concept, propertyNamesToTypes, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// RSNA Playbook file
handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Top 2000 Codes - US
handler = new LoincTop2000LabResultsUsHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Top 2000 Codes - SI
handler = new LoincTop2000LabResultsSiHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Universal Lab Order ValueSet
handler = new LoincUniversalOrderSetHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// IEEE Medical Device Codes
handler = new LoincIeeeMedicalDeviceCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, handler, ',', QuoteMode.NON_NUMERIC, false);

// Imaging Document Codes
handler = new LoincImagingDocumentCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_IMAGING_DOCUMENT_CODES_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_IMAGING_DOCUMENT_CODES_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Group File
handler = new LoincGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Group Terms File
handler = new LoincGroupTermsFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_GROUP_TERMS_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_GROUP_TERMS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Parent Group File
handler = new LoincParentGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_PARENT_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_PARENT_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

IOUtils.closeQuietly(theDescriptors);

@@ -349,18 +362,18 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
final Set<String> validConceptIds = new HashSet<>();

IRecordHandler handler = new SctHandlerConcept(validConceptIds);
iterateOverZipFile(theDescriptors, SCT_FILE_CONCEPT, handler, '\t', null);
iterateOverZipFile(theDescriptors, SCT_FILE_CONCEPT, handler, '\t', null, true);

ourLog.info("Have {} valid concept IDs", validConceptIds.size());

handler = new SctHandlerDescription(validConceptIds, code2concept, id2concept, codeSystemVersion);
iterateOverZipFile(theDescriptors, SCT_FILE_DESCRIPTION, handler, '\t', null);
iterateOverZipFile(theDescriptors, SCT_FILE_DESCRIPTION, handler, '\t', null, true);

ourLog.info("Got {} concepts, cloning map", code2concept.size());
final HashMap<String, TermConcept> rootConcepts = new HashMap<>(code2concept);

handler = new SctHandlerRelationship(codeSystemVersion, rootConcepts, code2concept);
iterateOverZipFile(theDescriptors, SCT_FILE_RELATIONSHIP, handler, '\t', null);
iterateOverZipFile(theDescriptors, SCT_FILE_RELATIONSHIP, handler, '\t', null, true);

IOUtils.closeQuietly(theDescriptors);
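
The new theIsPartialFilename parameter above lets the SNOMED CT release files be matched by substring while the LOINC files keep exact-name or path-suffix matching. The matching rule shown on its own (an illustrative helper, not the commit's own method):

// Sketch of the matching rule introduced above.
class ZipEntryMatching {
	static boolean filenameMatches(String theEntryName, String theFileNamePart, boolean theIsPartialFilename) {
		if (theIsPartialFilename) {
			return theEntryName.contains(theFileNamePart); // e.g. versioned SNOMED CT file names
		}
		return theEntryName.endsWith("/" + theFileNamePart) || theEntryName.equals(theFileNamePart); // exact LOINC file names
	}
}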
@@ -24,7 +24,7 @@ import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;

import org.hl7.fhir.r4.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.r4.hapi.ctx.ValidationSupportChain;
import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@@ -83,6 +83,7 @@ public class SearchParamExtractorDstu3Test {
};

SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(new DaoConfig(), ourCtx, ourValidationSupport, searchParamRegistry);
extractor.start();
Set<BaseResourceIndexedSearchParam> tokens = extractor.extractSearchParamTokens(new ResourceTable(), obs);
assertEquals(1, tokens.size());
ResourceIndexedSearchParamToken token = (ResourceIndexedSearchParamToken) tokens.iterator().next();
@@ -485,11 +485,18 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
Class<ResourceIndexedSearchParamNumber> type = ResourceIndexedSearchParamNumber.class;
List<ResourceIndexedSearchParamNumber> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
ourLog.info(toStringMultiline(results));
assertThat(results, containsInAnyOrder(
((ResourceIndexedSearchParamNumber) (new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_SEQUENCE, null).setResource(resource).setMissing(true))),
((ResourceIndexedSearchParamNumber) (new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("1.00")).setResource(resource))),
((ResourceIndexedSearchParamNumber) (new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("2.00")).setResource(resource)))
));

ResourceIndexedSearchParamNumber expect0 = new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("2.00"));
expect0.setResource(resource);
expect0.calculateHashes();
ResourceIndexedSearchParamNumber expect1 = new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_SEQUENCE, null);
expect1.setResource(resource).setMissing(true);
expect1.calculateHashes();
ResourceIndexedSearchParamNumber expect2 = new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("1.00"));
expect2.setResource(resource);
expect2.calculateHashes();

assertThat(results, containsInAnyOrder(expect0, expect1, expect2));
}
});
}
@@ -679,6 +679,8 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
ourLog.info("Now have {} inserts", QueryCountHolder.getGrandTotal().getInsert());
QueryCountHolder.clear();

ourLog.info("** About to update");

pt.setId(id);
pt.getNameFirstRep().addGiven("GIVEN1C");
myPatientDao.update(pt);
@@ -38,6 +38,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
ZipOutputStream zos = new ZipOutputStream(bos);

addFile(zos, "loincupload.properties");
addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE);
@@ -45,7 +46,6 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
addFile(zos, TerminologyLoaderSvcImpl.LOINC_GROUP_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_GROUP_TERMS_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_PARENT_GROUP_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_RELATED_CODE_MAPPING_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_DOCUMENT_ONTOLOGY_FILE);
@@ -166,7 +166,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
}

@Test
public void testUploadPackageMissingUrl() throws Exception {
public void testUploadPackageMissingUrl() {
try {
ourClient
.operation()
@@ -0,0 +1,69 @@
package ca.uhn.fhir.jpa.stresstest;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.ISearchParamRegistry;
import ca.uhn.fhir.jpa.dao.dstu3.SearchParamExtractorDstu3;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.dstu3.model.Patient;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;

public class IndexStressTest {

private static final Logger ourLog = LoggerFactory.getLogger(IndexStressTest.class);

@Test
public void testExtractSearchParams() {
Patient p = new Patient();
p.addName().setFamily("FOO").addGiven("BAR").addGiven("BAR");
p.getMaritalStatus().setText("DDDDD");
p.addAddress().addLine("A").addLine("B").addLine("C");

DaoConfig daoConfig = new DaoConfig();
FhirContext ctx = FhirContext.forDstu3();
IValidationSupport mockValidationSupport = mock(IValidationSupport.class);
IValidationSupport validationSupport = new CachingValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), mockValidationSupport));
ISearchParamRegistry searchParamRegistry = mock(ISearchParamRegistry.class);
SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(daoConfig, ctx, validationSupport, searchParamRegistry);
extractor.start();

Map<String, RuntimeSearchParam> spMap = ctx
.getResourceDefinition("Patient")
.getSearchParams()
.stream()
.collect(Collectors.toMap(RuntimeSearchParam::getName, t -> t));
when(searchParamRegistry.getActiveSearchParams(eq("Patient"))).thenReturn(spMap);

ResourceTable entity = new ResourceTable();
Set<ResourceIndexedSearchParamString> params = extractor.extractSearchParamStrings(entity, p);

StopWatch sw = new StopWatch();
int loops = 100;
for (int i = 0; i < loops; i++) {
entity = new ResourceTable();
params = extractor.extractSearchParamStrings(entity, p);
}

ourLog.info("Indexed {} times in {}ms/time", loops, sw.getMillisPerOperation(loops));

assertEquals(9, params.size());
verify(mockValidationSupport, times(1)).fetchAllStructureDefinitions((any(FhirContext.class)));
}
}
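
The new IndexStressTest above times repeated search-parameter extraction and verifies that fetchAllStructureDefinitions() is hit only once, i.e. that the caching layer absorbs the other hundred extractions. The timing idiom it relies on, shown in isolation with placeholder work instead of the extractor call:

// Illustrative sketch of the timing idiom only; the measured operation is a placeholder.
import ca.uhn.fhir.util.StopWatch;

class TimingIdiomExample {
	public static void main(String[] args) {
		StopWatch sw = new StopWatch();
		int loops = 100;
		long acc = 0;
		for (int i = 0; i < loops; i++) {
			acc += System.nanoTime() % 7; // placeholder for the call being benchmarked
		}
		System.out.println("Completed " + loops + " loops at " + sw.getMillisPerOperation(loops) + "ms/operation (" + acc + ")");
	}
}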
@@ -48,7 +48,6 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
myRequestValidatingInterceptor.addValidatorModule(module);
}

@Test
public void testMultithreadedSearch() throws Exception {
Bundle input = new Bundle();
@@ -27,6 +27,7 @@ import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
@@ -362,7 +363,9 @@ public class TerminologyLoaderSvcLoincTest {
mySvc.loadLoinc(myFiles.getFiles(), details);
fail();
} catch (UnprocessableEntityException e) {
assertEquals("Could not find the following mandatory files in input: [Loinc.csv, MultiAxialHierarchy.csv]", e.getMessage());
assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input:"));
assertThat(e.getMessage(), containsString("Loinc.csv"));
assertThat(e.getMessage(), containsString("MultiAxialHierarchy.csv"));
}
}
@@ -146,7 +146,6 @@
<version>1.1</version>
</dependency>

<dependency>
<groupId>xpp3</groupId>
<artifactId>xpp3</artifactId>
@@ -19,7 +19,6 @@ import org.hl7.fhir.dstu3.terminologies.ValueSetExpanderFactory;
import org.hl7.fhir.dstu3.terminologies.ValueSetExpanderSimple;
import org.hl7.fhir.dstu3.utils.INarrativeGenerator;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.exceptions.TerminologyServiceException;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;

import java.util.*;
@@ -28,311 +27,310 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander, ValueSetExpanderFactory {
private final FhirContext myCtx;
private Map<String, Resource> myFetchedResourceCache = new HashMap<String, Resource>();
private IValidationSupport myValidationSupport;
private ExpansionProfile myExpansionProfile;
private final FhirContext myCtx;
private Map<String, Resource> myFetchedResourceCache = new HashMap<String, Resource>();
private IValidationSupport myValidationSupport;
private ExpansionProfile myExpansionProfile;

public HapiWorkerContext(FhirContext theCtx, IValidationSupport theValidationSupport) {
Validate.notNull(theCtx, "theCtx must not be null");
Validate.notNull(theValidationSupport, "theValidationSupport must not be null");
myCtx = theCtx;
myValidationSupport = theValidationSupport;
}
public HapiWorkerContext(FhirContext theCtx, IValidationSupport theValidationSupport) {
Validate.notNull(theCtx, "theCtx must not be null");
Validate.notNull(theValidationSupport, "theValidationSupport must not be null");
myCtx = theCtx;
myValidationSupport = theValidationSupport;
}

@Override
public List<StructureDefinition> allStructures() {
return myValidationSupport.fetchAllStructureDefinitions(myCtx);
}
@Override
@CoverageIgnore
public List<MetadataResource> allConformanceResources() {
throw new UnsupportedOperationException();
}

@Override
public CodeSystem fetchCodeSystem(String theSystem) {
if (myValidationSupport == null) {
return null;
} else {
return myValidationSupport.fetchCodeSystem(myCtx, theSystem);
}
}
@Override
public List<StructureDefinition> allStructures() {
return myValidationSupport.fetchAllStructureDefinitions(myCtx);
}

@Override
public <T extends Resource> T fetchResource(Class<T> theClass, String theUri) {
if (myValidationSupport == null) {
return null;
} else {
@SuppressWarnings("unchecked")
T retVal = (T) myFetchedResourceCache.get(theUri);
if (retVal == null) {
retVal = myValidationSupport.fetchResource(myCtx, theClass, theUri);
if (retVal != null) {
myFetchedResourceCache.put(theUri, retVal);
}
}
return retVal;
}
}
@Override
public ValueSetExpansionOutcome expand(ValueSet theSource, ExpansionProfile theProfile) {
ValueSetExpansionOutcome vso;
try {
vso = getExpander().expand(theSource, theProfile);
} catch (InvalidRequestException e) {
throw e;
} catch (Exception e) {
throw new InternalErrorException(e);
}
if (vso.getError() != null) {
throw new InvalidRequestException(vso.getError());
} else {
return vso;
}
}

@Override
public List<ConceptMap> findMapsForSource(String theUrl) {
throw new UnsupportedOperationException();
}
@Override
public ValueSetExpansionOutcome expandVS(ValueSet theSource, boolean theCacheOk, boolean theHeiarchical) {
throw new UnsupportedOperationException();
}

@Override
public String getAbbreviation(String theName) {
throw new UnsupportedOperationException();
}
@Override
public ValueSetExpansionComponent expandVS(ConceptSetComponent theInc, boolean theHeiarchical) {
return myValidationSupport.expandValueSet(myCtx, theInc);
}

@Override
public ValueSetExpander getExpander() {
ValueSetExpanderSimple retVal = new ValueSetExpanderSimple(this, this);
retVal.setMaxExpansionSize(Integer.MAX_VALUE);
return retVal;
}
@Override
public CodeSystem fetchCodeSystem(String theSystem) {
if (myValidationSupport == null) {
return null;
} else {
return myValidationSupport.fetchCodeSystem(myCtx, theSystem);
}
}

@Override
public INarrativeGenerator getNarrativeGenerator(String thePrefix, String theBasePath) {
throw new UnsupportedOperationException();
}
@Override
public <T extends Resource> T fetchResource(Class<T> theClass, String theUri) {
if (myValidationSupport == null) {
return null;
} else {
@SuppressWarnings("unchecked")
T retVal = (T) myFetchedResourceCache.get(theUri);
if (retVal == null) {
retVal = myValidationSupport.fetchResource(myCtx, theClass, theUri);
if (retVal != null) {
myFetchedResourceCache.put(theUri, retVal);
}
}
return retVal;
}
}

@Override
public IParser getParser(ParserType theType) {
throw new UnsupportedOperationException();
}
@Override
public <T extends Resource> T fetchResourceWithException(Class<T> theClass_, String theUri) throws FHIRException {
T retVal = fetchResource(theClass_, theUri);
if (retVal == null) {
throw new FHIRException("Unable to fetch " + theUri);
}
return retVal;
}

@Override
public IParser getParser(String theType) {
throw new UnsupportedOperationException();
}
@Override
public List<ConceptMap> findMapsForSource(String theUrl) {
throw new UnsupportedOperationException();
}

@Override
public List<String> getResourceNames() {
List<String> result = new ArrayList<String>();
for (ResourceType next : ResourceType.values()) {
result.add(next.name());
}
Collections.sort(result);
return result;
}
@Override
public String getAbbreviation(String theName) {
throw new UnsupportedOperationException();
}

@Override
public <T extends Resource> boolean hasResource(Class<T> theClass_, String theUri) {
throw new UnsupportedOperationException();
}
@Override
public ValueSetExpander getExpander() {
ValueSetExpanderSimple retVal = new ValueSetExpanderSimple(this, this);
retVal.setMaxExpansionSize(Integer.MAX_VALUE);
return retVal;
}

@Override
public IParser newJsonParser() {
throw new UnsupportedOperationException();
}
@Override
public ExpansionProfile getExpansionProfile() {
return myExpansionProfile;
}

@Override
public void setExpansionProfile(ExpansionProfile theExpProfile) {
myExpansionProfile = theExpProfile;
}

@Override
public IParser newXmlParser() {
throw new UnsupportedOperationException();
}
@Override
public INarrativeGenerator getNarrativeGenerator(String thePrefix, String theBasePath) {
throw new UnsupportedOperationException();
}

@Override
public String oid2Uri(String theCode) {
throw new UnsupportedOperationException();
}
@Override
public IParser getParser(ParserType theType) {
throw new UnsupportedOperationException();
}

@Override
public boolean supportsSystem(String theSystem) {
if (myValidationSupport == null) {
return false;
} else {
return myValidationSupport.isCodeSystemSupported(myCtx, theSystem);
}
}
@Override
public IParser getParser(String theType) {
throw new UnsupportedOperationException();
}

@Override
public Set<String> typeTails() {
return new HashSet<String>(Arrays.asList("Integer", "UnsignedInt", "PositiveInt", "Decimal", "DateTime", "Date", "Time", "Instant", "String", "Uri", "Oid", "Uuid", "Id", "Boolean", "Code",
"Markdown", "Base64Binary", "Coding", "CodeableConcept", "Attachment", "Identifier", "Quantity", "SampledData", "Range", "Period", "Ratio", "HumanName", "Address", "ContactPoint",
"Timing", "Reference", "Annotation", "Signature", "Meta"));
}

@Override
public ValidationResult validateCode(CodeableConcept theCode, ValueSet theVs) {
for (Coding next : theCode.getCoding()) {
ValidationResult retVal = validateCode(next, theVs);
if (retVal != null && retVal.isOk()) {
return retVal;
}
}

return new ValidationResult(null, null);
}

@Override
public ValidationResult validateCode(Coding theCode, ValueSet theVs) {
String system = theCode.getSystem();
String code = theCode.getCode();
String display = theCode.getDisplay();
return validateCode(system, code, display, theVs);
}

@Override
public ValidationResult validateCode(String theSystem, String theCode, String theDisplay) {
IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode(myCtx, theSystem, theCode, theDisplay);
if (result == null) {
return null;
}
return new ValidationResult(result.getSeverity(), result.getMessage(), result.asConceptDefinition());
}

@Override
public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ConceptSetComponent theVsi) {
throw new UnsupportedOperationException();
}

@Override
public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ValueSet theVs) {

if (theVs != null && isNotBlank(theCode)) {
for (ConceptSetComponent next : theVs.getCompose().getInclude()) {
if (isBlank(theSystem) || theSystem.equals(next.getSystem())) {
for (ConceptReferenceComponent nextCode : next.getConcept()) {
if (theCode.equals(nextCode.getCode())) {
CodeType code = new CodeType(theCode);
return new ValidationResult(new ConceptDefinitionComponent(code));
}
}
}
}
}

boolean caseSensitive = true;
if (isNotBlank(theSystem)) {
CodeSystem system = fetchCodeSystem(theSystem);
if (system == null) {
return new ValidationResult(IssueSeverity.INFORMATION, "Code " + theSystem + "/" + theCode + " was not validated because the code system is not present");
}

if (system.hasCaseSensitive()) {
caseSensitive = system.getCaseSensitive();
}
}

String wantCode = theCode;
if (!caseSensitive) {
wantCode = wantCode.toUpperCase();
}

ValueSetExpansionOutcome expandedValueSet = null;

/*
* The following valueset is a special case, since the BCP codesystem is very difficult to expand
*/
if (theVs != null && "http://hl7.org/fhir/ValueSet/languages".equals(theVs.getId())) {
ValueSet expansion = new ValueSet();
for (ConceptSetComponent nextInclude : theVs.getCompose().getInclude()) {
for (ConceptReferenceComponent nextConcept : nextInclude.getConcept()) {
expansion.getExpansion().addContains().setCode(nextConcept.getCode()).setDisplay(nextConcept.getDisplay());
}
}
expandedValueSet = new ValueSetExpansionOutcome(expansion);
}

if (expandedValueSet == null) {
expandedValueSet = expand(theVs, null);
}

for (ValueSetExpansionContainsComponent next : expandedValueSet.getValueset().getExpansion().getContains()) {
String nextCode = next.getCode();
if (!caseSensitive) {
nextCode = nextCode.toUpperCase();
}

if (nextCode.equals(wantCode)) {
if (theSystem == null || next.getSystem().equals(theSystem)) {
ConceptDefinitionComponent definition = new ConceptDefinitionComponent();
definition.setCode(next.getCode());
definition.setDisplay(next.getDisplay());
ValidationResult retVal = new ValidationResult(definition);
return retVal;
}
}
}

return new ValidationResult(IssueSeverity.ERROR, "Unknown code[" + theCode + "] in system[" + theSystem + "]");
}

@Override
@CoverageIgnore
public List<MetadataResource> allConformanceResources() {
throw new UnsupportedOperationException();
}

@Override
@CoverageIgnore
public boolean hasCache() {
throw new UnsupportedOperationException();
}

@Override
public ValueSetExpansionOutcome expand(ValueSet theSource, ExpansionProfile theProfile) {
ValueSetExpansionOutcome vso;
try {
vso = getExpander().expand(theSource, theProfile);
} catch (InvalidRequestException e) {
throw e;
} catch (Exception e) {
throw new InternalErrorException(e);
}
if (vso.getError() != null) {
throw new InvalidRequestException(vso.getError());
} else {
return vso;
}
}

@Override
public ExpansionProfile getExpansionProfile() {
return myExpansionProfile;
}

@Override
public void setExpansionProfile(ExpansionProfile theExpProfile) {
myExpansionProfile = theExpProfile;
}

@Override
public ValueSetExpansionOutcome expandVS(ValueSet theSource, boolean theCacheOk, boolean theHeiarchical) {
throw new UnsupportedOperationException();
}

@Override
public ValueSetExpansionComponent expandVS(ConceptSetComponent theInc, boolean theHeiarchical) throws TerminologyServiceException {
return myValidationSupport.expandValueSet(myCtx, theInc);
}

@Override
public void setLogger(ILoggingService theLogger) {
throw new UnsupportedOperationException();
}

@Override
public String getVersion() {
return myCtx.getVersion().getVersion().getFhirVersionString();
}

@Override
public boolean isNoTerminologyServer() {
return false;
}

@Override
public <T extends Resource> T fetchResourceWithException(Class<T> theClass_, String theUri) throws FHIRException {
T retVal = fetchResource(theClass_, theUri);
if (retVal == null) {
throw new FHIRException("Unable to fetch " + theUri);
}
return retVal;
}
@Override
public List<String> getResourceNames() {
List<String> result = new ArrayList<String>();
for (ResourceType next : ResourceType.values()) {
result.add(next.name());
}
Collections.sort(result);
return result;
}

@Override
public List<String> getTypeNames() {
throw new UnsupportedOperationException();
}

@Override
public String getVersion() {
return myCtx.getVersion().getVersion().getFhirVersionString();
}

@Override
@CoverageIgnore
public boolean hasCache() {
throw new UnsupportedOperationException();
}

@Override
public <T extends Resource> boolean hasResource(Class<T> theClass_, String theUri) {
throw new UnsupportedOperationException();
}

@Override
public boolean isNoTerminologyServer() {
return false;
}

@Override
public IParser newJsonParser() {
throw new UnsupportedOperationException();
}

@Override
public IParser newXmlParser() {
throw new UnsupportedOperationException();
}

@Override
public String oid2Uri(String theCode) {
throw new UnsupportedOperationException();
}

@Override
public void setLogger(ILoggingService theLogger) {
throw new UnsupportedOperationException();
}

@Override
public boolean supportsSystem(String theSystem) {
if (myValidationSupport == null) {
return false;
} else {
return myValidationSupport.isCodeSystemSupported(myCtx, theSystem);
}
}

@Override
public Set<String> typeTails() {
return new HashSet<String>(Arrays.asList("Integer", "UnsignedInt", "PositiveInt", "Decimal", "DateTime", "Date", "Time", "Instant", "String", "Uri", "Oid", "Uuid", "Id", "Boolean", "Code",
"Markdown", "Base64Binary", "Coding", "CodeableConcept", "Attachment", "Identifier", "Quantity", "SampledData", "Range", "Period", "Ratio", "HumanName", "Address", "ContactPoint",
"Timing", "Reference", "Annotation", "Signature", "Meta"));
}

@Override
public ValidationResult validateCode(CodeableConcept theCode, ValueSet theVs) {
for (Coding next : theCode.getCoding()) {
ValidationResult retVal = validateCode(next, theVs);
if (retVal != null && retVal.isOk()) {
return retVal;
}
}

return new ValidationResult(null, null);
}

@Override
public ValidationResult validateCode(Coding theCode, ValueSet theVs) {
String system = theCode.getSystem();
String code = theCode.getCode();
String display = theCode.getDisplay();
return validateCode(system, code, display, theVs);
}

@Override
public ValidationResult validateCode(String theSystem, String theCode, String theDisplay) {
IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode(myCtx, theSystem, theCode, theDisplay);
if (result == null) {
return null;
}
return new ValidationResult(result.getSeverity(), result.getMessage(), result.asConceptDefinition());
}

@Override
public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ConceptSetComponent theVsi) {
throw new UnsupportedOperationException();
}

@Override
public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ValueSet theVs) {

if (theVs != null && isNotBlank(theCode)) {
for (ConceptSetComponent next : theVs.getCompose().getInclude()) {
if (isBlank(theSystem) || theSystem.equals(next.getSystem())) {
for (ConceptReferenceComponent nextCode : next.getConcept()) {
if (theCode.equals(nextCode.getCode())) {
CodeType code = new CodeType(theCode);
return new ValidationResult(new ConceptDefinitionComponent(code));
}
}
}
}
}

boolean caseSensitive = true;
if (isNotBlank(theSystem)) {
CodeSystem system = fetchCodeSystem(theSystem);
if (system == null) {
return new ValidationResult(IssueSeverity.INFORMATION, "Code " + theSystem + "/" + theCode + " was not validated because the code system is not present");
}

if (system.hasCaseSensitive()) {
caseSensitive = system.getCaseSensitive();
}
}

String wantCode = theCode;
if (!caseSensitive) {
wantCode = wantCode.toUpperCase();
}

ValueSetExpansionOutcome expandedValueSet = null;

/*
* The following valueset is a special case, since the BCP codesystem is very difficult to expand
*/
if (theVs != null && "http://hl7.org/fhir/ValueSet/languages".equals(theVs.getId())) {
ValueSet expansion = new ValueSet();
for (ConceptSetComponent nextInclude : theVs.getCompose().getInclude()) {
for (ConceptReferenceComponent nextConcept : nextInclude.getConcept()) {
expansion.getExpansion().addContains().setCode(nextConcept.getCode()).setDisplay(nextConcept.getDisplay());
}
}
expandedValueSet = new ValueSetExpansionOutcome(expansion);
}

if (expandedValueSet == null) {
expandedValueSet = expand(theVs, null);
}

for (ValueSetExpansionContainsComponent next : expandedValueSet.getValueset().getExpansion().getContains()) {
String nextCode = next.getCode();
if (!caseSensitive) {
nextCode = nextCode.toUpperCase();
}

if (nextCode.equals(wantCode)) {
if (theSystem == null || next.getSystem().equals(theSystem)) {
ConceptDefinitionComponent definition = new ConceptDefinitionComponent();
definition.setCode(next.getCode());
definition.setDisplay(next.getDisplay());
ValidationResult retVal = new ValidationResult(definition);
return retVal;
}
}
}

return new ValidationResult(IssueSeverity.ERROR, "Unknown code[" + theCode + "] in system[" + theSystem + "]");
}

}
@ -0,0 +1,67 @@
package org.hl7.fhir.dstu3.hapi.validation;

import ca.uhn.fhir.context.FhirContext;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.StructureDefinition;
import org.hl7.fhir.dstu3.model.ValueSet;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.util.List;
import java.util.concurrent.TimeUnit;

@SuppressWarnings("unchecked")
public class CachingValidationSupport implements IValidationSupport {

private final IValidationSupport myWrap;
private final Cache<String, Object> myCache;

public CachingValidationSupport(IValidationSupport theWrap) {
myWrap = theWrap;
myCache = Caffeine.newBuilder().expireAfterWrite(60, TimeUnit.SECONDS).build();
}

@Override
public ValueSet.ValueSetExpansionComponent expandValueSet(FhirContext theContext, ValueSet.ConceptSetComponent theInclude) {
return myWrap.expandValueSet(theContext, theInclude);
}

@Override
public List<IBaseResource> fetchAllConformanceResources(FhirContext theContext) {
return (List<IBaseResource>) myCache.get("fetchAllConformanceResources",
t -> myWrap.fetchAllConformanceResources(theContext));
}

@Override
public List<StructureDefinition> fetchAllStructureDefinitions(FhirContext theContext) {
return (List<StructureDefinition>) myCache.get("fetchAllStructureDefinitions",
t -> myWrap.fetchAllStructureDefinitions(theContext));
}

@Override
public CodeSystem fetchCodeSystem(FhirContext theContext, String theSystem) {
return myWrap.fetchCodeSystem(theContext, theSystem);
}

@Override
public <T extends IBaseResource> T fetchResource(FhirContext theContext, Class<T> theClass, String theUri) {
return myWrap.fetchResource(theContext, theClass, theUri);
}

@Override
public StructureDefinition fetchStructureDefinition(FhirContext theCtx, String theUrl) {
return myWrap.fetchStructureDefinition(theCtx, theUrl);
}

@Override
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return myWrap.isCodeSystemSupported(theContext, theSystem);
}

@Override
public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
return myWrap.validateCode(theContext, theCodeSystem, theCode, theDisplay);
}
}

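The DSTU2, DSTU3, and R4 copies of CachingValidationSupport added in this commit all share the delegate-and-cache shape shown above. As a usage illustration only (not part of the commit), wrapping an existing support object is a one-liner; this minimal sketch assumes the DSTU3 classes added above:

    import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
    import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;

    public class CachingSupportWiringSketch {
        // Wrap whatever validation support the server already uses (for example a
        // ValidationSupportChain, as the updated tests later in this diff do); the
        // expensive "fetch all" lookups are then served from the 60-second cache.
        public static IValidationSupport withCaching(IValidationSupport existingSupport) {
            return new CachingValidationSupport(existingSupport);
        }
    }
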
@ -0,0 +1,56 @@
package org.hl7.fhir.instance.hapi.validation;

import ca.uhn.fhir.context.FhirContext;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.hl7.fhir.instance.model.StructureDefinition;
import org.hl7.fhir.instance.model.ValueSet;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.util.List;
import java.util.concurrent.TimeUnit;

@SuppressWarnings("unchecked")
public class CachingValidationSupport implements IValidationSupport {

private final IValidationSupport myWrap;
private final Cache<String, Object> myCache;

public CachingValidationSupport(IValidationSupport theWrap) {
myWrap = theWrap;
myCache = Caffeine.newBuilder().expireAfterWrite(60, TimeUnit.SECONDS).build();
}

@Override
public List<StructureDefinition> allStructures() {
return (List<StructureDefinition>) myCache.get("fetchAllStructureDefinitions",
t -> myWrap.allStructures());
}

@Override
public ValueSet.ValueSetExpansionComponent expandValueSet(FhirContext theContext, ValueSet.ConceptSetComponent theInclude) {
return myWrap.expandValueSet(theContext, theInclude);
}

@Override
public ValueSet fetchCodeSystem(FhirContext theContext, String theSystem) {
return myWrap.fetchCodeSystem(theContext, theSystem);
}

@Override
public <T extends IBaseResource> T fetchResource(FhirContext theContext, Class<T> theClass, String theUri) {
return myWrap.fetchResource(theContext, theClass, theUri);
}

@Override
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return myWrap.isCodeSystemSupported(theContext, theSystem);
}

@Override
public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
return myWrap.validateCode(theContext, theCodeSystem, theCode, theDisplay);
}

}

@ -0,0 +1,67 @@
package org.hl7.fhir.r4.hapi.validation;

import ca.uhn.fhir.context.FhirContext;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.util.List;
import java.util.concurrent.TimeUnit;

@SuppressWarnings("unchecked")
public class CachingValidationSupport implements IValidationSupport {

private final IValidationSupport myWrap;
private final Cache<String, Object> myCache;

public CachingValidationSupport(IValidationSupport theWrap) {
myWrap = theWrap;
myCache = Caffeine.newBuilder().expireAfterWrite(60, TimeUnit.SECONDS).build();
}

@Override
public ValueSet.ValueSetExpansionComponent expandValueSet(FhirContext theContext, ValueSet.ConceptSetComponent theInclude) {
return myWrap.expandValueSet(theContext, theInclude);
}

@Override
public List<IBaseResource> fetchAllConformanceResources(FhirContext theContext) {
return (List<IBaseResource>) myCache.get("fetchAllConformanceResources",
t -> myWrap.fetchAllConformanceResources(theContext));
}

@Override
public List<StructureDefinition> fetchAllStructureDefinitions(FhirContext theContext) {
return (List<StructureDefinition>) myCache.get("fetchAllStructureDefinitions",
t -> myWrap.fetchAllStructureDefinitions(theContext));
}

@Override
public CodeSystem fetchCodeSystem(FhirContext theContext, String theSystem) {
return myWrap.fetchCodeSystem(theContext, theSystem);
}

@Override
public <T extends IBaseResource> T fetchResource(FhirContext theContext, Class<T> theClass, String theUri) {
return myWrap.fetchResource(theContext, theClass, theUri);
}

@Override
public StructureDefinition fetchStructureDefinition(FhirContext theCtx, String theUrl) {
return myWrap.fetchStructureDefinition(theCtx, theUrl);
}

@Override
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return myWrap.isCodeSystemSupported(theContext, theSystem);
}

@Override
public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
return myWrap.validateCode(theContext, theCodeSystem, theCode, theDisplay);
}
}

@ -1,8 +1,9 @@
package org.hl7.fhir.r4.hapi.ctx;
package org.hl7.fhir.r4.hapi.validation;

import ca.uhn.fhir.context.FhirContext;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.MetadataResource;
import org.hl7.fhir.r4.model.StructureDefinition;

@ -1,7 +1,8 @@
package org.hl7.fhir.r4.hapi.ctx;
package org.hl7.fhir.r4.hapi.validation;

import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent;

@ -77,7 +77,7 @@ public class FhirInstanceValidatorDstu3Test {
myVal.setValidateAgainstStandardSchematron(false);

myMockSupport = mock(IValidationSupport.class);
ValidationSupportChain validationSupport = new ValidationSupportChain(myMockSupport, myDefaultValidationSupport);
CachingValidationSupport validationSupport = new CachingValidationSupport(new ValidationSupportChain(myMockSupport, myDefaultValidationSupport));
myInstanceVal = new FhirInstanceValidator(validationSupport);

myVal.registerValidatorModule(myInstanceVal);

@ -17,7 +17,10 @@ import org.hl7.fhir.r4.conformance.ProfileUtilities;
import org.hl7.fhir.r4.context.IWorkerContext;
import org.hl7.fhir.r4.hapi.ctx.*;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport.CodeValidationResult;
import org.hl7.fhir.r4.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.hapi.validation.PrePopulatedValidationSupport;
import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;

@ -94,7 +97,7 @@ public class FhirInstanceValidatorR4Test {
myVal.setValidateAgainstStandardSchematron(false);

myMockSupport = mock(IValidationSupport.class);
ValidationSupportChain validationSupport = new ValidationSupportChain(myMockSupport, myDefaultValidationSupport);
CachingValidationSupport validationSupport = new CachingValidationSupport(new ValidationSupportChain(myMockSupport, myDefaultValidationSupport));
myInstanceVal = new FhirInstanceValidator(validationSupport);

myVal.registerValidatorModule(myInstanceVal);

@ -539,7 +542,7 @@ public class FhirInstanceValidatorR4Test {
public void testValidateProfileWithExtension() throws IOException, FHIRException {
PrePopulatedValidationSupport valSupport = new PrePopulatedValidationSupport();
DefaultProfileValidationSupport defaultSupport = new DefaultProfileValidationSupport();
ValidationSupportChain support = new ValidationSupportChain(valSupport, defaultSupport);
CachingValidationSupport support = new CachingValidationSupport(new ValidationSupportChain(valSupport, defaultSupport));

// Prepopulate SDs
valSupport.addStructureDefinition(loadStructureDefinition(defaultSupport, "/dstu3/myconsent-profile.xml"));

@ -7,13 +7,12 @@ import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import ca.uhn.fhir.validation.SingleValidationMessage;
import ca.uhn.fhir.validation.ValidationResult;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.r4.context.IWorkerContext;
import org.hl7.fhir.r4.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport.CodeValidationResult;
import org.hl7.fhir.r4.hapi.ctx.ValidationSupportChain;
import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.CodeSystem.CodeSystemContentMode;

@ -23,7 +22,6 @@ import org.hl7.fhir.r4.model.Questionnaire.QuestionnaireItemOptionComponent;
import org.hl7.fhir.r4.model.Questionnaire.QuestionnaireItemType;
import org.hl7.fhir.r4.model.QuestionnaireResponse.QuestionnaireResponseItemComponent;
import org.hl7.fhir.r4.model.QuestionnaireResponse.QuestionnaireResponseStatus;
import org.hl7.fhir.r4.model.IdType;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;

@ -89,6 +89,12 @@
client-assigned resource ID). Thanks to Frank Tao for the pull
request!
</action>
<action type="add">
A new IValidationSupport implementation has been added, named CachingValidationSupport. This
module wraps another implementation and provides short-term caching. This can yield a dramatic
performance improvement on servers that are repeatedly validating or evaluating FHIRPath
expressions under load. This module is used by default in the JPA server.
</action>
</release>
<release version="3.4.0" date="2018-05-28">
<action type="add">