More work on terminology service

jamesagnew 2016-05-17 09:05:37 -04:00
parent 207ba872fa
commit 78fa6eaeaf
19 changed files with 647 additions and 136 deletions

View File

@ -76,4 +76,5 @@ ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.successfulUpdate=Successfully update
ca.uhn.fhir.jpa.dao.SearchBuilder.invalidQuantityPrefix=Unable to handle quantity prefix "{0}" for value: {1}
ca.uhn.fhir.jpa.dao.SearchBuilder.invalidNumberPrefix=Unable to handle number prefix "{0}" for value: {1}
ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemUri=Can not create multiple code systems with URI "{0}", already have one with resource ID: {1}
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc.cannotCreateDuplicateCodeSystemUri=Can not create multiple code systems with URI "{0}", already have one with resource ID: {1}
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!

View File

@ -27,6 +27,12 @@
<name>HAPI FHIR JPA Server</name>
<dependencies>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-csv</artifactId>
<version>1.3</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-base</artifactId>

View File

@ -41,7 +41,7 @@ import org.springframework.scheduling.config.ScheduledTaskRegistrar;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvc;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcImpl;
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvc;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
@Configuration
@ -129,11 +129,6 @@ public class BaseConfig implements SchedulingConfigurer {
// return retVal;
}
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcImpl();
}
// @PostConstruct
// public void wireResourceDaos() {
// Map<String, IDao> daoBeans = myAppCtx.getBeansOfType(IDao.class);

View File

@ -29,12 +29,20 @@ import org.springframework.context.annotation.Primary;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.SearchParamExtractorDstu1;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcDstu1;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
@Configuration
public class BaseDstu1Config extends BaseConfig {
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcDstu1();
}
@Bean
@Primary
public FhirContext defaultFhirContext() {

View File

@ -31,12 +31,20 @@ import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.SearchParamExtractorDstu2;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcDstu2;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
@Configuration
@EnableTransactionManagement
public class BaseDstu2Config extends BaseConfig {
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcDstu2();
}
@Bean
@Primary
public FhirContext defaultFhirContext() {

View File

@ -1,7 +1,10 @@
package ca.uhn.fhir.jpa.config.dstu3;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.dstu3.hapi.validation.HapiWorkerContext;
import org.hl7.fhir.dstu3.hapi.validation.IValidationSupport;
import org.hl7.fhir.dstu3.terminologies.ValueSetExpander;
import org.hl7.fhir.dstu3.utils.IWorkerContext;
import org.hl7.fhir.dstu3.validation.IResourceValidator.BestPracticeWarningLevel;
/*
@ -37,6 +40,8 @@ import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.dstu3.SearchParamExtractorDstu3;
import ca.uhn.fhir.jpa.term.HapiTerminologySvcDstu3;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
import ca.uhn.fhir.validation.IValidatorModule;
@ -44,13 +49,23 @@ import ca.uhn.fhir.validation.IValidatorModule;
@EnableTransactionManagement
public class BaseDstu3Config extends BaseConfig {
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcDstu3();
}
@Bean
public HapiWorkerContext workerContext() {
return new HapiWorkerContext(defaultFhirContext(), validationSupportChainDstu3());
}
@Bean
@Primary
public FhirContext defaultFhirContext() {
return fhirContextDstu3();
}
@Bean(name="myInstanceValidatorDstu3")
@Bean(name = "myInstanceValidatorDstu3")
@Lazy
public IValidatorModule instanceValidatorDstu3() {
FhirInstanceValidator val = new FhirInstanceValidator();
@ -85,12 +100,12 @@ public class BaseDstu3Config extends BaseConfig {
}
@Primary
@Bean(autowire=Autowire.BY_NAME, name="myJpaValidationSupportChainDstu3")
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainDstu3")
public IValidationSupport validationSupportChainDstu3() {
return new JpaValidationSupportChainDstu3();
}
@Bean(autowire=Autowire.BY_TYPE)
@Bean(autowire = Autowire.BY_TYPE)
public SearchParamExtractorDstu3 searchParamExtractor() {
return new SearchParamExtractorDstu3();
}

View File

@ -56,12 +56,17 @@ public class DaoConfig {
private List<IServerInterceptor> myInterceptors;
private ResourceEncodingEnum myResourceEncoding = ResourceEncodingEnum.JSONC;
// ***
// update setter javadoc if default changes
// ***
private int myMaximumExpansionSize = 5000;
private ResourceEncodingEnum myResourceEncoding = ResourceEncodingEnum.JSONC;
private boolean mySchedulingDisabled;
private boolean mySubscriptionEnabled;
private long mySubscriptionPollDelay = 1000;
private Long mySubscriptionPurgeInactiveAfterMillis;
/**
* Search results are stored in the database so that they can be paged through. After this
* number of milliseconds, they will be deleted from the database. Defaults to 1 hour.
@ -94,10 +99,15 @@ public class DaoConfig {
}
return myInterceptors;
}
/**
* See {@link #setMaximumExpansionSize(int)}
*/
public int getMaximumExpansionSize() {
return myMaximumExpansionSize;
}
public ResourceEncodingEnum getResourceEncoding() {
return myResourceEncoding;
}
public long getSubscriptionPollDelay() {
return mySubscriptionPollDelay;
}
@ -218,6 +228,15 @@ public class DaoConfig {
myInterceptors = theInterceptors;
}
/**
* Sets the maximum number of codes that will be added to a valueset expansion before
* the operation will be failed as too costly
*/
public void setMaximumExpansionSize(int theMaximumExpansionSize) {
Validate.isTrue(theMaximumExpansionSize > 0, "theMaximumExpansionSize must be > 0");
myMaximumExpansionSize = theMaximumExpansionSize;
}
public void setResourceEncoding(ResourceEncodingEnum theResourceEncoding) {
myResourceEncoding = theResourceEncoding;
}
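
Not part of the diff: the new myMaximumExpansionSize setting above defaults to 5000; a minimal sketch of how a deployer might raise it, assuming a programmatically configured DaoConfig bean:

// Illustrative sketch only - values must be > 0 or Validate.isTrue() throws IllegalArgumentException
DaoConfig daoConfig = new DaoConfig();
daoConfig.setMaximumExpansionSize(10000); // 10000 is an arbitrary example value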

View File

@ -100,6 +100,7 @@ import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.jpa.term.VersionIndependentConcept;
import ca.uhn.fhir.jpa.util.StopWatch;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
import ca.uhn.fhir.model.api.IQueryParameterType;
@ -156,8 +157,8 @@ public class SearchBuilder {
private ISearchResultDao mySearchResultDao;
private IHapiTerminologySvc myTerminologySvc;
public SearchBuilder(FhirContext theFhirContext, EntityManager theEntityManager, PlatformTransactionManager thePlatformTransactionManager, IFulltextSearchSvc theSearchDao,
ISearchResultDao theSearchResultDao, BaseHapiFhirDao<?> theDao, IResourceIndexedSearchParamUriDao theResourceIndexedSearchParamUriDao, IForcedIdDao theForcedIdDao, IHapiTerminologySvc theTerminologySvc) {
public SearchBuilder(FhirContext theFhirContext, EntityManager theEntityManager, PlatformTransactionManager thePlatformTransactionManager, IFulltextSearchSvc theSearchDao, ISearchResultDao theSearchResultDao, BaseHapiFhirDao<?> theDao,
IResourceIndexedSearchParamUriDao theResourceIndexedSearchParamUriDao, IForcedIdDao theForcedIdDao, IHapiTerminologySvc theTerminologySvc) {
myContext = theFhirContext;
myEntityManager = theEntityManager;
myPlatformTransactionManager = thePlatformTransactionManager;
@ -380,8 +381,7 @@ public class SearchBuilder {
return;
}
private boolean addPredicateMissingFalseIfPresent(CriteriaBuilder theBuilder, String theParamName, Root<? extends BaseResourceIndexedSearchParam> from, List<Predicate> codePredicates,
IQueryParameterType nextOr) {
private boolean addPredicateMissingFalseIfPresent(CriteriaBuilder theBuilder, String theParamName, Root<? extends BaseResourceIndexedSearchParam> from, List<Predicate> codePredicates, IQueryParameterType nextOr) {
boolean missingFalse = false;
if (nextOr.getMissing() != null) {
if (nextOr.getMissing().booleanValue() == true) {
@ -395,8 +395,7 @@ public class SearchBuilder {
return missingFalse;
}
private boolean addPredicateMissingFalseIfPresentForResourceLink(CriteriaBuilder theBuilder, String theParamName, Root<? extends ResourceLink> from, List<Predicate> codePredicates,
IQueryParameterType nextOr) {
private boolean addPredicateMissingFalseIfPresentForResourceLink(CriteriaBuilder theBuilder, String theParamName, Root<? extends ResourceLink> from, List<Predicate> codePredicates, IQueryParameterType nextOr) {
boolean missingFalse = false;
if (nextOr.getMissing() != null) {
if (nextOr.getMissing().booleanValue() == true) {
@ -745,15 +744,21 @@ public class SearchBuilder {
}
/*
* CriteriaBuilder builder = myEntityManager.getCriteriaBuilder(); CriteriaQuery<Long> cq = builder.createQuery(Long.class); Root<ResourceTable> from = cq.from(ResourceTable.class);
* CriteriaBuilder builder = myEntityManager.getCriteriaBuilder(); CriteriaQuery<Long> cq =
* builder.createQuery(Long.class); Root<ResourceTable> from = cq.from(ResourceTable.class);
* cq.select(from.get("myId").as(Long.class));
*
* Subquery<Long> subQ = cq.subquery(Long.class); Root<? extends BaseResourceIndexedSearchParam> subQfrom = subQ.from(theParamTable); subQ.select(subQfrom.get("myResourcePid").as(Long.class));
* Predicate subQname = builder.equal(subQfrom.get("myParamName"), theParamName); Predicate subQtype = builder.equal(subQfrom.get("myResourceType"), myResourceName);
* Subquery<Long> subQ = cq.subquery(Long.class); Root<? extends BaseResourceIndexedSearchParam> subQfrom =
* subQ.from(theParamTable); subQ.select(subQfrom.get("myResourcePid").as(Long.class));
* Predicate subQname = builder.equal(subQfrom.get("myParamName"), theParamName); Predicate subQtype =
* builder.equal(subQfrom.get("myResourceType"), myResourceName);
* subQ.where(builder.and(subQtype, subQname));
*
* List<Predicate> predicates = new ArrayList<Predicate>(); predicates.add(builder.not(builder.in(from.get("myId")).value(subQ))); predicates.add(builder.equal(from.get("myResourceType"),
* myResourceName)); predicates.add(builder.isNull(from.get("myDeleted"))); createPredicateResourceId(builder, cq, predicates, from.get("myId").as(Long.class));
* List<Predicate> predicates = new ArrayList<Predicate>();
* predicates.add(builder.not(builder.in(from.get("myId")).value(subQ)));
* predicates.add(builder.equal(from.get("myResourceType"),
* myResourceName)); predicates.add(builder.isNull(from.get("myDeleted"))); createPredicateResourceId(builder, cq,
* predicates, from.get("myId").as(Long.class));
*/
List<Pair<String, String>> notTags = Lists.newArrayList();
@ -935,6 +940,10 @@ public class SearchBuilder {
}
Predicate singleCode = createPredicateToken(nextOr, theParamName, builder, from);
if (singleCode == null) {
doSetPids(new ArrayList<Long>());
return;
}
codePredicates.add(singleCode);
}
@ -986,10 +995,13 @@ public class SearchBuilder {
if (param.getQualifier() == UriParamQualifierEnum.ABOVE) {
/*
* :above is an inefficient query- It means that the user is supplying a more specific URL (say http://example.com/foo/bar/baz) and that we should match on any URLs that are less
* specific but otherwise the same. For example http://example.com and http://example.com/foo would both match.
* :above is an inefficient query- It means that the user is supplying a more specific URL (say
* http://example.com/foo/bar/baz) and that we should match on any URLs that are less
* specific but otherwise the same. For example http://example.com and http://example.com/foo would both
* match.
*
* We do this by querying the DB for all candidate URIs and then manually checking each one. This isn't very efficient, but this is also probably not a very common type of query to do.
* We do this by querying the DB for all candidate URIs and then manually checking each one. This isn't
* very efficient, but this is also probably not a very common type of query to do.
*
* If we ever need to make this more efficient, lucene could certainly be used as an optimization.
*/
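
A rough sketch of the candidate check described in the comment above (illustrative only; the variable names are assumptions, not the actual implementation):

// :above semantics - a stored URI matches if the supplied, more specific URL starts with it
String supplied = "http://example.com/foo/bar/baz";
for (String candidate : candidateUrisFromDb) { // hypothetical collection loaded from the DB
	if (supplied.startsWith(candidate)) {
		// e.g. "http://example.com" and "http://example.com/foo" both match
	}
}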
@ -1148,8 +1160,7 @@ public class SearchBuilder {
predicates.addAll(createLastUpdatedPredicates(myParams.getLastUpdatedAndRemove(), builder, from));
}
private Predicate createPredicateNumeric(CriteriaBuilder builder, IQueryParameterType params, ParamPrefixEnum cmpValue, BigDecimal valueValue, final Expression<BigDecimal> path,
String invalidMessageName, String theValueString) {
private Predicate createPredicateNumeric(CriteriaBuilder builder, IQueryParameterType params, ParamPrefixEnum cmpValue, BigDecimal valueValue, final Expression<BigDecimal> path, String invalidMessageName, String theValueString) {
Predicate num;
switch (cmpValue) {
case GREATERTHAN:
@ -1265,8 +1276,7 @@ public class SearchBuilder {
}
private Predicate createPredicateString(IQueryParameterType theParameter, String theParamName, CriteriaBuilder theBuilder,
From<ResourceIndexedSearchParamString, ResourceIndexedSearchParamString> theFrom) {
private Predicate createPredicateString(IQueryParameterType theParameter, String theParamName, CriteriaBuilder theBuilder, From<ResourceIndexedSearchParamString, ResourceIndexedSearchParamString> theFrom) {
String rawSearchTerm;
if (theParameter instanceof TokenParam) {
TokenParam id = (TokenParam) theParameter;
@ -1285,8 +1295,7 @@ public class SearchBuilder {
}
if (rawSearchTerm.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
throw new InvalidRequestException("Parameter[" + theParamName + "] has length (" + rawSearchTerm.length() + ") that is longer than maximum allowed ("
+ ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm);
throw new InvalidRequestException("Parameter[" + theParamName + "] has length (" + rawSearchTerm.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm);
}
String likeExpression = BaseHapiFhirDao.normalizeString(rawSearchTerm);
@ -1316,8 +1325,7 @@ public class SearchBuilder {
return orPredicates;
}
private Predicate createPredicateToken(IQueryParameterType theParameter, String theParamName, CriteriaBuilder theBuilder,
From<ResourceIndexedSearchParamToken, ResourceIndexedSearchParamToken> theFrom) {
private Predicate createPredicateToken(IQueryParameterType theParameter, String theParamName, CriteriaBuilder theBuilder, From<ResourceIndexedSearchParamToken, ResourceIndexedSearchParamToken> theFrom) {
String code;
String system;
TokenParamModifier modifier = null;
@ -1339,15 +1347,43 @@ public class SearchBuilder {
}
if (system != null && system.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) {
throw new InvalidRequestException(
"Parameter[" + theParamName + "] has system (" + system.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamToken.MAX_LENGTH + "): " + system);
throw new InvalidRequestException("Parameter[" + theParamName + "] has system (" + system.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamToken.MAX_LENGTH + "): " + system);
}
if (code != null && code.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) {
throw new InvalidRequestException(
"Parameter[" + theParamName + "] has code (" + code.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamToken.MAX_LENGTH + "): " + code);
throw new InvalidRequestException("Parameter[" + theParamName + "] has code (" + code.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamToken.MAX_LENGTH + "): " + code);
}
/*
* Process token modifiers (:in, :below, :above)
*/
List<VersionIndependentConcept> codes = null;
if (modifier == TokenParamModifier.IN) {
codes = myTerminologySvc.expandValueSet(code);
} else if (modifier == TokenParamModifier.ABOVE) {
codes = myTerminologySvc.findCodesAbove(system, code);
} else if (modifier == TokenParamModifier.BELOW) {
codes = myTerminologySvc.findCodesBelow(system, code);
}
if (codes != null) {
if (codes.isEmpty()) {
return null;
}
List<Predicate> orPredicates = new ArrayList<Predicate>();
for (VersionIndependentConcept nextCode : codes) {
Predicate systemPredicate = theBuilder.equal(theFrom.get("mySystem"), nextCode.getSystem());
Predicate codePredicate = theBuilder.equal(theFrom.get("myValue"), nextCode.getCode());
orPredicates.add(theBuilder.and(systemPredicate, codePredicate));
}
return theBuilder.or(orPredicates.toArray(new Predicate[orPredicates.size()]));
}
/*
* Ok, this is a normal query
*/
ArrayList<Predicate> singleCodePredicates = (new ArrayList<Predicate>());
if (StringUtils.isNotBlank(system)) {
singleCodePredicates.add(theBuilder.equal(theFrom.get("mySystem"), system));
@ -1359,37 +1395,17 @@ public class SearchBuilder {
}
if (StringUtils.isNotBlank(code)) {
if (modifier != null) {
switch (modifier) {
case IN:
case NOT_IN:
system = defaultString(system);
if (!myTerminologySvc.supportsSystem(system)) {
throw new InvalidRequestException("Unable to perform :in search for system: " + system);
}
Set<TermConcept> codeConcepts = myTerminologySvc.findCodesBelow(system, code);
Set<String> codeCodes = toCodes(codeConcepts);
if (modifier == TokenParamModifier.IN) {
singleCodePredicates.add(theFrom.get("myValue").in(codeCodes));
} else {
singleCodePredicates.add(theBuilder.not(theFrom.get("myValue").in(codeCodes)));
}
break;
default:
throw new InvalidRequestException("Invalid modifier " + modifier.getValue() + " for param " + theParamName);
}
} else {
singleCodePredicates.add(theBuilder.equal(theFrom.get("myValue"), code));
}
singleCodePredicates.add(theBuilder.equal(theFrom.get("myValue"), code));
} else {
/*
* As of HAPI FHIR 1.5, if the client searched for a token with a system but no specified value this means to match all tokens with the given value.
* As of HAPI FHIR 1.5, if the client searched for a token with a system but no specified value this means to
* match all tokens with the given value.
*
* I'm not sure I agree with this, but hey.. FHIR-I voted and this was the result :)
*/
// singleCodePredicates.add(theBuilder.isNull(theFrom.get("myValue")));
}
Predicate singleCode = theBuilder.and(toArray(singleCodePredicates));
return singleCode;
}
@ -2004,7 +2020,8 @@ public class SearchBuilder {
}
/**
* Figures out the tolerance for a search. For example, if the user is searching for <code>4.00</code>, this method returns <code>0.005</code> because we should actually match values which are
* Figures out the tolerance for a search. For example, if the user is searching for <code>4.00</code>, this method
* returns <code>0.005</code> because we should actually match values which are
* <code>4 (+/-) 0.005</code> according to the FHIR specs.
*/
static BigDecimal calculateFuzzAmount(ParamPrefixEnum cmpValue, BigDecimal theValue) {
@ -2059,8 +2076,7 @@ public class SearchBuilder {
return resultList;
}
public static void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation,
EntityManager entityManager, FhirContext context, IDao theDao) {
public static void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation, EntityManager entityManager, FhirContext context, IDao theDao) {
if (theIncludePids.isEmpty()) {
return;
}
@ -2109,8 +2125,7 @@ public class SearchBuilder {
*
* @param theLastUpdated
*/
public static HashSet<Long> loadReverseIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<Long> theMatches, Set<Include> theRevIncludes, boolean theReverseMode,
DateRangeParam theLastUpdated) {
public static HashSet<Long> loadReverseIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<Long> theMatches, Set<Include> theRevIncludes, boolean theReverseMode, DateRangeParam theLastUpdated) {
if (theMatches.size() == 0) {
return new HashSet<Long>();
}
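
Not part of the diff: the :in/:above/:below handling added above is what lets a token search delegate to the terminology service. A minimal sketch, assuming an injected IFhirResourceDao<Observation> named myObservationDao as in the DSTU3 tests later in this commit:

// Illustrative only; mirrors FhirResourceDaoTerminologyDstu3Test below
SearchParameterMap params = new SearchParameterMap();
params.add(Observation.SP_CODE,
		new TokenParam(null, "http://example.com/my_value_set").setModifier(TokenParamModifier.IN));
myObservationDao.search(params); // the value set is expanded via IHapiTerminologySvc.expandValueSet(...)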

View File

@ -1,8 +1,10 @@
package ca.uhn.fhir.jpa.term;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@ -13,6 +15,7 @@ import org.springframework.transaction.annotation.Transactional;
import com.google.common.base.Stopwatch;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
@ -26,8 +29,8 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.ValidateUtil;
public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(HapiTerminologySvcImpl.class);
public abstract class BaseHapiTerminologySvc implements IHapiTerminologySvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiTerminologySvc.class);
private static final Object PLACEHOLDER_OBJECT = new Object();
@Autowired
@ -39,21 +42,35 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
@Autowired
private ITermConceptDao myConceptDao;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;
@Autowired
private FhirContext myContext;
protected FhirContext myContext;
private void fetchChildren(TermConcept theConcept, Set<TermConcept> theSetToPopulate) {
for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) {
TermConcept nextChild = nextChildLink.getChild();
if (theSetToPopulate.add(nextChild)) {
if (addToSet(theSetToPopulate, nextChild)) {
fetchChildren(nextChild, theSetToPopulate);
}
}
}
private boolean addToSet(Set<TermConcept> theSetToPopulate, TermConcept theConcept) {
boolean retVal = theSetToPopulate.add(theConcept);
if (retVal) {
if (theSetToPopulate.size() >= myDaoConfig.getMaximumExpansionSize()) {
String msg = myContext.getLocalizer().getMessage(BaseHapiTerminologySvc.class, "expansionTooLarge", myDaoConfig.getMaximumExpansionSize());
throw new InvalidRequestException(msg);
}
}
return retVal;
}
private TermConcept fetchLoadedCode(Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) {
TermCodeSystemVersion codeSystem = myCodeSystemVersionDao.findByCodeSystemResourceAndVersion(theCodeSystemResourcePid, theCodeSystemVersionPid);
TermConcept concept = myConceptDao.findByCodeSystemAndCode(codeSystem, theCode);
@ -63,7 +80,7 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
private void fetchParents(TermConcept theConcept, Set<TermConcept> theSetToPopulate) {
for (TermConceptParentChildLink nextChildLink : theConcept.getParents()) {
TermConcept nextChild = nextChildLink.getParent();
if (theSetToPopulate.add(nextChild)) {
if (addToSet(theSetToPopulate, nextChild)) {
fetchParents(nextChild, theSetToPopulate);
}
}
@ -88,6 +105,16 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
return retVal;
}
@Override
public List<VersionIndependentConcept> findCodesAbove(String theSystem, String theCode) {
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
TermCodeSystemVersion csv = cs.getCurrentVersion();
Set<TermConcept> codes = findCodesAbove(cs.getResource().getId(), csv.getResourceVersionId(), theCode);
ArrayList<VersionIndependentConcept> retVal = toVersionIndependentConcepts(theSystem, codes);
return retVal;
}
@Transactional(propagation = Propagation.REQUIRED)
@Override
public Set<TermConcept> findCodesBelow(Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) {
@ -107,6 +134,16 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
return retVal;
}
@Override
public List<VersionIndependentConcept> findCodesBelow(String theSystem, String theCode) {
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
TermCodeSystemVersion csv = cs.getCurrentVersion();
Set<TermConcept> codes = findCodesBelow(cs.getResource().getId(), csv.getResourceVersionId(), theCode);
ArrayList<VersionIndependentConcept> retVal = toVersionIndependentConcepts(theSystem, codes);
return retVal;
}
private void persistChildren(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack) {
if (theConceptsStack.put(theConcept, PLACEHOLDER_OBJECT) != null) {
return;
@ -142,7 +179,7 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
myCodeSystemDao.save(codeSystem);
} else {
if (!ObjectUtil.equals(codeSystem.getResource().getId(), theCodeSystem.getResource().getId())) {
String msg = myContext.getLocalizer().getMessage(HapiTerminologySvcImpl.class, "cannotCreateDuplicateCodeSystemUri", theSystemUri, codeSystem.getResource().getIdDt().toUnqualifiedVersionless().getValue());
String msg = myContext.getLocalizer().getMessage(BaseHapiTerminologySvc.class, "cannotCreateDuplicateCodeSystemUri", theSystemUri, codeSystem.getResource().getIdDt().toUnqualifiedVersionless().getValue());
throw new UnprocessableEntityException(msg);
}
}
@ -170,6 +207,14 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
return cs != null;
}
private ArrayList<VersionIndependentConcept> toVersionIndependentConcepts(String theSystem, Set<TermConcept> codes) {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>(codes.size());
for (TermConcept next : codes) {
retVal.add(new VersionIndependentConcept(theSystem, next.getCode()));
}
return retVal;
}
private void validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack) {
ValidateUtil.isNotNullOrThrowInvalidRequest(theConcept.getCodeSystem() == theCodeSystem, "Codesystem contains a code which does not reference the codesystem");
ValidateUtil.isNotBlankOrThrowInvalidRequest(theConcept.getCode(), "Codesystem contains a code which does not reference the codesystem");
@ -185,12 +230,4 @@ public class HapiTerminologySvcImpl implements IHapiTerminologySvc {
theConceptsStack.remove(theConcept);
}
@Override
public Set<TermConcept> findCodesBelow(String theSystem, String theCode) {
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(theSystem);
TermCodeSystemVersion csv = cs.getCurrentVersion();
return findCodesBelow(cs.getResource().getId(), csv.getResourceVersionId(), theCode);
}
}
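
Not part of the diff: the new URI-based lookups return lightweight VersionIndependentConcept objects rather than TermConcept entities. A sketch, assuming an injected IHapiTerminologySvc named terminologySvc and the example code system created in the DSTU3 tests below:

// Illustrative only
List<VersionIndependentConcept> below = terminologySvc.findCodesBelow("http://example.com/my_code_system", "A");   // descendants of "A", e.g. AA, AAA, AB
List<VersionIndependentConcept> above = terminologySvc.findCodesAbove("http://example.com/my_code_system", "AAA"); // ancestors of "AAA", e.g. AA, A
// A traversal that exceeds DaoConfig#getMaximumExpansionSize() aborts in addToSet() with
// InvalidRequestException: "Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!"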

View File

@ -0,0 +1,12 @@
package ca.uhn.fhir.jpa.term;
import java.util.List;
public class HapiTerminologySvcDstu1 extends BaseHapiTerminologySvc {
@Override
public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
throw new UnsupportedOperationException();
}
}

View File

@ -0,0 +1,19 @@
package ca.uhn.fhir.jpa.term;
import java.util.List;
import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
import org.springframework.beans.factory.annotation.Autowired;
public class HapiTerminologySvcDstu2 extends BaseHapiTerminologySvc {
@Autowired
private IValidationSupport myValidationSupport;
@Override
public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
throw new UnsupportedOperationException();
}
}

View File

@ -0,0 +1,43 @@
package ca.uhn.fhir.jpa.term;
import java.util.ArrayList;
import java.util.List;
import org.hl7.fhir.dstu3.model.ValueSet;
import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionContainsComponent;
import org.hl7.fhir.dstu3.terminologies.ValueSetExpander;
import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ValueSetExpansionOutcome;
import org.hl7.fhir.dstu3.utils.IWorkerContext;
import org.springframework.beans.factory.annotation.Autowired;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
public class HapiTerminologySvcDstu3 extends BaseHapiTerminologySvc {
@Autowired
private IWorkerContext myWorkerContext;
@Autowired
private ValueSetExpander myValueSetExpander;
@Override
public List<VersionIndependentConcept> expandValueSet(String theValueSet) {
ValueSet source = new ValueSet();
source.getCompose().addImport(theValueSet);
try {
ArrayList<VersionIndependentConcept> retVal = new ArrayList<VersionIndependentConcept>();
ValueSetExpansionOutcome outcome = myValueSetExpander.expand(source);
for (ValueSetExpansionContainsComponent next : outcome.getValueset().getExpansion().getContains()) {
retVal.add(new VersionIndependentConcept(next.getSystem(), next.getCode()));
}
return retVal;
} catch (Exception e) {
throw new InternalErrorException(e);
}
}
}
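
A usage sketch (not in the diff): the DSTU3 implementation wraps the requested URI in a ValueSet compose import and hands it to the HL7 ValueSetExpander wired in above, so a caller just passes the ValueSet URI; terminologySvc is an assumed injected IHapiTerminologySvc:

// Illustrative only; the URI is the one exercised in the DSTU3 tests below
List<VersionIndependentConcept> codes =
		terminologySvc.expandValueSet("http://hl7.org/fhir/ValueSet/audit-event-type");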

View File

@ -1,5 +1,7 @@
package ca.uhn.fhir.jpa.term;
import java.util.List;
/*
* #%L
* HAPI FHIR JPA Server
@ -31,10 +33,14 @@ public interface IHapiTerminologySvc {
Set<TermConcept> findCodesBelow(Long theCodeSystemResourcePid, Long theCodeSystemResourceVersionPid, String theCode);
Set<TermConcept> findCodesBelow(String theSystem, String theCode);
List<VersionIndependentConcept> findCodesBelow(String theSystem, String theCode);
void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, TermCodeSystemVersion theCodeSytem);
public boolean supportsSystem(String system);
public boolean supportsSystem(String theCodeSystem);
List<VersionIndependentConcept> expandValueSet(String theValueSet);
List<VersionIndependentConcept> findCodesAbove(String theSystem, String theCode);
}

View File

@ -0,0 +1,65 @@
package ca.uhn.fhir.jpa.term;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.io.IOUtils;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
public class TerminologyLoaderSvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvc.class);
public void loadSnomedCt(byte[] theZipBytes) {
Map<String, TermConcept> id2concept = new HashMap<String, TermConcept>();
ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(theZipBytes));
try {
for (ZipEntry nextEntry; (nextEntry = zis.getNextEntry()) != null; ) {
ZippedFileInputStream entryStream = new ZippedFileInputStream(zis);
byte[] bytes = IOUtils.toByteArray(entryStream);
ourLog.info("Read file {} - {} bytes", nextEntry.getName(), bytes.length);
String string = new String(bytes, "UTF-8");
CSVParser parsed = CSVParser.parse(string, CSVFormat.newFormat('\t').withFirstRecordAsHeader());
ourLog.info("Header map: {}", parsed.getHeaderMap());
}
} catch (IOException e) {
throw new InternalErrorException(e);
} finally {
IOUtils.closeQuietly(zis);
}
}
private static class ZippedFileInputStream extends InputStream {
private ZipInputStream is;
public ZippedFileInputStream(ZipInputStream is){
this.is = is;
}
@Override
public int read() throws IOException {
return is.read();
}
@Override
public void close() throws IOException {
is.closeEntry();
}
}
}
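
Usage sketch (not in the diff): the loader takes the raw bytes of a SNOMED CT RF2 release zip; the file name here is hypothetical, and TerminologyLoaderSvcTest later in this commit builds an equivalent archive in memory:

// Illustrative only - the path is a made-up example (Files.readAllBytes throws IOException if it cannot be read)
byte[] zipBytes = java.nio.file.Files.readAllBytes(
		java.nio.file.Paths.get("SnomedCT_Release_INT_20160131_Full.zip"));
new TerminologyLoaderSvc().loadSnomedCt(zipBytes);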

View File

@ -0,0 +1,29 @@
package ca.uhn.fhir.jpa.term;
public class VersionIndependentConcept {
private String mySystem;
private String myCode;
public VersionIndependentConcept(String theSystem, String theCode) {
setSystem(theSystem);
setCode(theCode);
}
public String getSystem() {
return mySystem;
}
public void setSystem(String theSystem) {
mySystem = theSystem;
}
public String getCode() {
return myCode;
}
public void setCode(String theCode) {
myCode = theCode;
}
}

View File

@ -1428,23 +1428,23 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
patient.addName().addFamily("Tester").addGiven("testSearchTokenParam2");
myPatientDao.create(patient, mySrd);
{
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_LANGUAGE, new TokenParam(null, "testSearchTokenParamCode", true));
assertEquals(0, myPatientDao.search(map).size());
}
{
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_LANGUAGE, new TokenParam(null, "testSearchTokenParamCode", true));
map.add(Patient.SP_IDENTIFIER, new IdentifierDt("urn:system", "testSearchTokenParam001"));
assertEquals(0, myPatientDao.search(map).size());
}
{
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_LANGUAGE, new TokenParam(null, "testSearchTokenParamComText", true));
map.add(Patient.SP_IDENTIFIER, new IdentifierDt("urn:system", "testSearchTokenParam001"));
assertEquals(1, myPatientDao.search(map).size());
}
{
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_LANGUAGE, new TokenParam(null, "testSearchTokenParamCode", true));
assertEquals(0, myPatientDao.search(map).size());
}
{
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_LANGUAGE, new TokenParam(null, "testSearchTokenParamCode", true));
map.add(Patient.SP_IDENTIFIER, new IdentifierDt("urn:system", "testSearchTokenParam001"));
assertEquals(0, myPatientDao.search(map).size());
}
{
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_IDENTIFIER, new IdentifierDt("urn:system", "testSearchTokenParam001"));

View File

@ -13,6 +13,7 @@ import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.hl7.fhir.dstu3.hapi.validation.IValidationSupport;
import org.hl7.fhir.dstu3.model.Appointment;
import org.hl7.fhir.dstu3.model.AuditEvent;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.CarePlan;
import org.hl7.fhir.dstu3.model.CodeSystem;
@ -95,12 +96,21 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
@Qualifier("myAppointmentDaoDstu3")
protected IFhirResourceDao<Appointment> myAppointmentDao;
@Autowired
@Qualifier("myAuditEventDaoDstu3")
protected IFhirResourceDao<AuditEvent> myAuditEventDao;
@Autowired
@Qualifier("myBundleDaoDstu3")
protected IFhirResourceDao<Bundle> myBundleDao;
@Autowired
@Qualifier("myCarePlanDaoDstu3")
protected IFhirResourceDao<CarePlan> myCarePlanDao;
@Autowired
@Qualifier("myCodeSystemDaoDstu3")
protected IFhirResourceDao<CodeSystem> myCodeSystemDao;
@Autowired
@Qualifier("myCompartmentDefinitionDaoDstu3")
protected IFhirResourceDao<CompartmentDefinition> myCompartmentDefinitionDao;
@Autowired
@Qualifier("myConceptMapDaoDstu3")
protected IFhirResourceDao<ConceptMap> myConceptMapDao;
@Autowired
@ -144,11 +154,12 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
@Autowired
@Qualifier("myNamingSystemDaoDstu3")
protected IFhirResourceDao<NamingSystem> myNamingSystemDao;
@Autowired
@Qualifier("myObservationDaoDstu3")
protected IFhirResourceDao<Observation> myObservationDao;
@Autowired
@Qualifier("myOperationDefinitionDaoDstu3")
protected IFhirResourceDao<OperationDefinition> myOperationDefinitionDao;
@Autowired
@Qualifier("myOrganizationDaoDstu3")
protected IFhirResourceDao<Organization> myOrganizationDao;
@ -156,9 +167,6 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
@Qualifier("myPatientDaoDstu3")
protected IFhirResourceDaoPatient<Patient> myPatientDao;
@Autowired
@Qualifier("myCarePlanDaoDstu3")
protected IFhirResourceDao<CarePlan> myCarePlanDao;
@Autowired
@Qualifier("myPractitionerDaoDstu3")
protected IFhirResourceDao<Practitioner> myPractitionerDao;
@Autowired
@ -180,12 +188,6 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
@Qualifier("myStructureDefinitionDaoDstu3")
protected IFhirResourceDao<StructureDefinition> myStructureDefinitionDao;
@Autowired
@Qualifier("myCompartmentDefinitionDaoDstu3")
protected IFhirResourceDao<CompartmentDefinition> myCompartmentDefinitionDao;
@Autowired
@Qualifier("myOperationDefinitionDaoDstu3")
protected IFhirResourceDao<OperationDefinition> myOperationDefinitionDao;
@Autowired
@Qualifier("mySubscriptionDaoDstu3")
protected IFhirResourceDaoSubscription<Subscription> mySubscriptionDao;
@Autowired

View File

@ -1,33 +1,39 @@
package ca.uhn.fhir.jpa.dao.dstu3;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import java.util.Map;
import java.util.Set;
import org.hl7.fhir.dstu3.model.AuditEvent;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.dstu3.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.dstu3.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.dstu3.model.Observation.ObservationStatus;
import org.hl7.fhir.dstu3.model.ValueSet;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import ca.uhn.fhir.jpa.dao.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.SearchParameterMap;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.TestUtil;
public class FhirResourceDaoTerminologyDstu3Test extends BaseJpaDstu3Test {
private static final String URL_MY_VALUE_SET = "http://example.com/my_value_set";
private static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoTerminologyDstu3Test.class);
@AfterClass
@ -40,7 +46,7 @@ public class FhirResourceDaoTerminologyDstu3Test extends BaseJpaDstu3Test {
public void testCodeSystemWithDefinedCodes() {
//@formatter:off
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl("http://example.com/my_code_system");
codeSystem.setUrl(URL_MY_CODE_SYSTEM);
codeSystem.setContent(CodeSystemContentMode.COMPLETE);
codeSystem
.addConcept().setCode("A").setDisplay("Code A")
@ -60,46 +66,211 @@ public class FhirResourceDaoTerminologyDstu3Test extends BaseJpaDstu3Test {
}
@Test
public void testSearchCodeIn() {
//@formatter:off
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl("http://example.com/my_code_system");
codeSystem.setContent(CodeSystemContentMode.COMPLETE);
codeSystem
.addConcept().setCode("A").setDisplay("Code A")
.addConcept(new ConceptDefinitionComponent().setCode("AA").setDisplay("Code AA"))
.addConcept(new ConceptDefinitionComponent().setCode("AB").setDisplay("Code AB"));
codeSystem
.addConcept().setCode("B").setDisplay("Code A")
.addConcept(new ConceptDefinitionComponent().setCode("BA").setDisplay("Code AA"))
.addConcept(new ConceptDefinitionComponent().setCode("BB").setDisplay("Code AB"));
//@formatter:on
myCodeSystemDao.create(codeSystem, new ServletRequestDetails());
@Ignore
public void testSearchCodeInEmptyValueSet() {
ValueSet valueSet = new ValueSet();
valueSet.setUrl(URL_MY_VALUE_SET);
myValueSetDao.create(valueSet, mySrd);
Observation obsAA = new Observation();
obsAA.getCode().addCoding().setSystem("http://example.com/my_code_system").setCode("AA");
obsAA.setStatus(ObservationStatus.FINAL);
obsAA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("AA");
myObservationDao.create(obsAA, mySrd).getId().toUnqualifiedVersionless();
Observation obsBA = new Observation();
obsBA.setStatus(ObservationStatus.FINAL);
obsBA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("BA");
myObservationDao.create(obsBA, mySrd).getId().toUnqualifiedVersionless();
Observation obsCA = new Observation();
obsCA.setStatus(ObservationStatus.FINAL);
obsCA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("CA");
myObservationDao.create(obsCA, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap params;
params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), empty());
params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN));
params.add(Observation.SP_STATUS, new TokenParam(null, "final"));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), empty());
}
@Test
public void testSearchCodeInLocalCodesystem() {
createLocalCsAndVs();
Observation obsAA = new Observation();
obsAA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("AA");
IIdType idAA = myObservationDao.create(obsAA, mySrd).getId().toUnqualifiedVersionless();
Observation obsBA = new Observation();
obsBA.getCode().addCoding().setSystem("http://example.com/my_code_system").setCode("BA");
obsBA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("BA");
IIdType idBA = myObservationDao.create(obsBA, mySrd).getId().toUnqualifiedVersionless();
Observation obsCA = new Observation();
obsCA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("CA");
IIdType idCA = myObservationDao.create(obsCA, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam("http://example.com/my_code_system", "A").setModifier(TokenParamModifier.IN));
params.add(Observation.SP_CODE, new TokenParam(null, URL_MY_VALUE_SET).setModifier(TokenParamModifier.IN));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), containsInAnyOrder(idAA.getValue(), idBA.getValue()));
}
@Test
public void testSearchCodeInFhirCodesystem() {
createLocalCsAndVs();
AuditEvent aeIn1 = new AuditEvent();
aeIn1.getType().setSystem("http://nema.org/dicom/dicm").setCode("110102");
IIdType idIn1 = myAuditEventDao.create(aeIn1, mySrd).getId().toUnqualifiedVersionless();
AuditEvent aeIn2 = new AuditEvent();
aeIn2.getType().setSystem("http://hl7.org/fhir/audit-event-type").setCode("rest");
IIdType idIn2 = myAuditEventDao.create(aeIn2, mySrd).getId().toUnqualifiedVersionless();
AuditEvent aeOut1 = new AuditEvent();
aeOut1.getType().setSystem("http://example.com").setCode("foo");
IIdType idOut1 = myAuditEventDao.create(aeOut1, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap params = new SearchParameterMap();
params.add(AuditEvent.SP_TYPE, new TokenParam(null, "http://hl7.org/fhir/ValueSet/audit-event-type").setModifier(TokenParamModifier.IN));
assertThat(toUnqualifiedVersionlessIdValues(myAuditEventDao.search(params)), containsInAnyOrder(idIn1.getValue(), idIn2.getValue()));
params = new SearchParameterMap();
params.add(AuditEvent.SP_TYPE, new TokenParam(null, "http://hl7.org/fhir/ValueSet/v3-PurposeOfUse").setModifier(TokenParamModifier.IN));
assertThat(toUnqualifiedVersionlessIdValues(myAuditEventDao.search(params)), empty());
}
/**
* Can't currently abort costly expansions for built-in FHIR code systems, so this test is ignored for now
*/
@Test
@Ignore
public void testRefuseCostlyExpansionFhirCodesystem() {
createLocalCsAndVs();
myDaoConfig.setMaximumExpansionSize(1);
SearchParameterMap params = new SearchParameterMap();
params.add(AuditEvent.SP_TYPE, new TokenParam(null, "http://hl7.org/fhir/ValueSet/audit-event-type").setModifier(TokenParamModifier.IN));
try {
myAuditEventDao.search(params);
fail();
} catch (InvalidRequestException e) {
assertEquals("", e.getMessage());
}
}
@Test
public void testRefuseCostlyExpansionLocalCodesystem() {
createLocalCsAndVs();
myDaoConfig.setMaximumExpansionSize(1);
SearchParameterMap params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "AAA").setModifier(TokenParamModifier.ABOVE));
try {
myObservationDao.search(params);
fail();
} catch (InvalidRequestException e) {
assertEquals("Expansion of ValueSet produced too many codes (maximum 1) - Operation aborted!", e.getMessage());
}
}
@Test
public void testSearchCodeAboveLocalCodesystem() {
createLocalCsAndVs();
Observation obsAA = new Observation();
obsAA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("AA");
IIdType idAA = myObservationDao.create(obsAA, mySrd).getId().toUnqualifiedVersionless();
Observation obsBA = new Observation();
obsBA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("BA");
IIdType idBA = myObservationDao.create(obsBA, mySrd).getId().toUnqualifiedVersionless();
Observation obsCA = new Observation();
obsCA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("CA");
IIdType idCA = myObservationDao.create(obsCA, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "AAA").setModifier(TokenParamModifier.ABOVE));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), containsInAnyOrder(idAA.getValue()));
params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "A").setModifier(TokenParamModifier.ABOVE));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), empty());
}
@Before
public void before() {
myDaoConfig.setMaximumExpansionSize(5000);
}
@Test
public void testSearchCodeBelowLocalCodesystem() {
createLocalCsAndVs();
Observation obsAA = new Observation();
obsAA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("AA");
IIdType idAA = myObservationDao.create(obsAA, mySrd).getId().toUnqualifiedVersionless();
Observation obsBA = new Observation();
obsBA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("BA");
IIdType idBA = myObservationDao.create(obsBA, mySrd).getId().toUnqualifiedVersionless();
Observation obsCA = new Observation();
obsCA.getCode().addCoding().setSystem(URL_MY_CODE_SYSTEM).setCode("CA");
IIdType idCA = myObservationDao.create(obsCA, mySrd).getId().toUnqualifiedVersionless();
SearchParameterMap params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "A").setModifier(TokenParamModifier.BELOW));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), containsInAnyOrder(idAA.getValue()));
params = new SearchParameterMap();
params.add(Observation.SP_CODE, new TokenParam(URL_MY_CODE_SYSTEM, "AAA").setModifier(TokenParamModifier.BELOW));
assertThat(toUnqualifiedVersionlessIdValues(myObservationDao.search(params)), empty());
}
private void createLocalCsAndVs() {
//@formatter:off
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl(URL_MY_CODE_SYSTEM);
codeSystem.setContent(CodeSystemContentMode.COMPLETE);
codeSystem
.addConcept().setCode("A").setDisplay("Code A")
.addConcept(new ConceptDefinitionComponent().setCode("AA").setDisplay("Code AA")
.addConcept(new ConceptDefinitionComponent().setCode("AAA").setDisplay("Code AAA"))
)
.addConcept(new ConceptDefinitionComponent().setCode("AB").setDisplay("Code AB"));
codeSystem
.addConcept().setCode("B").setDisplay("Code B")
.addConcept(new ConceptDefinitionComponent().setCode("BA").setDisplay("Code BA"))
.addConcept(new ConceptDefinitionComponent().setCode("BB").setDisplay("Code BB"));
//@formatter:on
myCodeSystemDao.create(codeSystem, new ServletRequestDetails());
ValueSet valueSet = new ValueSet();
valueSet.setUrl(URL_MY_VALUE_SET);
valueSet.getCompose().addInclude().setSystem(codeSystem.getUrl());
myValueSetDao.create(valueSet, mySrd);
}
@Test
public void testCodeSystemCreateDuplicateFails() {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl("http://example.com/my_code_system");
codeSystem.setUrl(URL_MY_CODE_SYSTEM);
codeSystem.setContent(CodeSystemContentMode.COMPLETE);
IIdType id = myCodeSystemDao.create(codeSystem, new ServletRequestDetails()).getId().toUnqualified();
codeSystem = new CodeSystem();
codeSystem.setUrl("http://example.com/my_code_system");
codeSystem.setUrl(URL_MY_CODE_SYSTEM);
codeSystem.setContent(CodeSystemContentMode.COMPLETE);
try {
myCodeSystemDao.create(codeSystem, new ServletRequestDetails());

View File

@ -0,0 +1,60 @@
package ca.uhn.fhir.jpa.term;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.Validate;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import ca.uhn.fhir.util.TestUtil;
public class TerminologyLoaderSvcTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcTest.class);
private TerminologyLoaderSvc mySvc;
@Before
public void before() {
mySvc = new TerminologyLoaderSvc();
}
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
@Test
// @Ignore
public void testLoadSnomedCt() throws Exception {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(bos);
addEntry(zos, "sct2_Concept_Full_INT_20160131.txt");
addEntry(zos, "sct2_Concept_Full-en_INT_20160131.txt");
addEntry(zos, "sct2_Description_Full-en_INT_20160131.txt");
addEntry(zos, "sct2_Identifier_Full_INT_20160131.txt");
addEntry(zos, "sct2_Relationship_Full_INT_20160131.txt");
addEntry(zos, "sct2_StatedRelationship_Full_INT_20160131.txt");
addEntry(zos, "sct2_TextDefinition_Full-en_INT_20160131.txt");
zos.close();
ourLog.info("ZIP file has {} bytes", bos.toByteArray().length);
mySvc.loadSnomedCt(bos.toByteArray());
}
private void addEntry(ZipOutputStream zos, String fileName) throws IOException {
ourLog.info("Adding {} to test zip", fileName);
zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + fileName));
byte[] byteArray = IOUtils.toByteArray(getClass().getResourceAsStream("/sct/" + fileName));
Validate.notNull(byteArray);
zos.write(byteArray);
zos.closeEntry();
}
}