Merge branch 'improve_jpa_indexes'

James Agnew 2018-07-01 12:08:03 -04:00
commit b929864873
91 changed files with 2497 additions and 1753 deletions

View File

@@ -502,6 +502,7 @@
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava-testlib</artifactId>

View File

@@ -8,10 +8,10 @@ import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.instance.utils.IResourceValidator.BestPracticeWarningLevel;
import org.hl7.fhir.r4.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.context.annotation.Bean;
@@ -29,9 +29,9 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -81,7 +81,7 @@ public class BaseDstu2Config extends BaseConfig {
public IValidatorModule instanceValidatorDstu2() {
FhirInstanceValidator retVal = new FhirInstanceValidator();
retVal.setBestPracticeWarningLevel(IResourceValidator.BestPracticeWarningLevel.Warning);
retVal.setValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2()));
retVal.setValidationSupport(new CachingValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2())));
return retVal;
}
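The change above wraps the DSTU2 validation support chain in a CachingValidationSupport decorator, so repeated profile and terminology lookups during validation are served from memory instead of going through the chain (and ultimately the database) every time. As a rough sketch of the decorator idea only, here is a minimal version using Caffeine, which this commit adds to the pom; the class name, cache settings, and method below are illustrative assumptions, not the real internals of CachingValidationSupport:

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

class CachingSupportSketch {
	// Short-lived cache in front of expensive lookups (e.g. StructureDefinitions)
	private final Cache<String, Object> myCache = Caffeine.newBuilder()
		.expireAfterWrite(60, TimeUnit.SECONDS)
		.maximumSize(5000)
		.build();

	Object fetchResource(String theUri, Function<String, Object> theDelegate) {
		// Compute through the delegate on a miss, otherwise return the cached value
		return myCache.get(theUri, theDelegate);
	}
}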
@@ -91,6 +91,13 @@ public class BaseDstu2Config extends BaseConfig {
return retVal;
}
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
return retVal;
}
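The myResourceCountsCache bean above memoizes the expensive resource-count query for 60 seconds (60 * DateUtils.MILLIS_PER_SECOND). A minimal sketch of the same idea using Guava's expiring memoizing supplier, with runExpensiveCountQuery as a hypothetical stand-in for systemDaoDstu2().getResourceCounts():

import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.TimeUnit;

class ResourceCountCacheSketch {
	// Stand-in for the real count query against the system DAO
	static Map<String, Long> runExpensiveCountQuery() {
		return Collections.emptyMap();
	}

	// Callers share one supplier; the query re-runs at most once per minute
	static final Supplier<Map<String, Long>> COUNTS =
		Suppliers.memoizeWithExpiration(ResourceCountCacheSketch::runExpensiveCountQuery, 60, TimeUnit.SECONDS);
}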
@Bean(autowire = Autowire.BY_TYPE)
public IFulltextSearchSvc searchDao() {
FulltextSearchSvcImpl searchDao = new FulltextSearchSvcImpl();
@@ -121,13 +128,6 @@ public class BaseDstu2Config extends BaseConfig {
return retVal;
}
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
return retVal;
}
@Bean(autowire = Autowire.BY_TYPE)
public IHapiTerminologySvc terminologyService() {
return new HapiTerminologySvcDstu2();

View File

@@ -19,6 +19,7 @@ import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
@@ -37,9 +38,9 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -78,13 +79,17 @@ public class BaseDstu3Config extends BaseConfig {
return val;
}
@Bean
public JpaValidationSupportChainDstu3 jpaValidationSupportChain() {
return new JpaValidationSupportChainDstu3();
}
@Bean(name = "myJpaValidationSupportDstu3", autowire = Autowire.BY_NAME)
public ca.uhn.fhir.jpa.dao.dstu3.IJpaValidationSupportDstu3 jpaValidationSupportDstu3() {
ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
return retVal;
}
@Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts());
@@ -142,7 +147,7 @@ public class BaseDstu3Config extends BaseConfig {
@Primary
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainDstu3")
public IValidationSupport validationSupportChainDstu3() {
return new JpaValidationSupportChainDstu3();
return new CachingValidationSupport(jpaValidationSupportChain());
}
}

View File

@@ -21,6 +21,7 @@ import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.hapi.rest.server.GraphQLProvider;
import org.hl7.fhir.r4.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.utils.GraphQLEngine;
import org.hl7.fhir.r4.utils.IResourceValidator.BestPracticeWarningLevel;
@@ -40,9 +41,9 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -93,6 +94,11 @@ public class BaseR4Config extends BaseConfig {
return val;
}
@Bean
public JpaValidationSupportChainR4 jpaValidationSupportChain() {
return new JpaValidationSupportChainR4();
}
@Bean(name = "myJpaValidationSupportR4", autowire = Autowire.BY_NAME)
public ca.uhn.fhir.jpa.dao.r4.IJpaValidationSupportR4 jpaValidationSupportR4() {
ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4 retVal = new ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4();
@@ -156,7 +162,7 @@ public class BaseR4Config extends BaseConfig {
@Primary
@Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainR4")
public IValidationSupport validationSupportChainR4() {
return new JpaValidationSupportChainR4();
return new CachingValidationSupport(jpaValidationSupportChain());
}
}

View File

@@ -1,45 +1,46 @@
package ca.uhn.fhir.jpa.dao;
import static org.apache.commons.lang3.StringUtils.compare;
import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.trim;
import java.io.CharArrayWriter;
import java.io.UnsupportedEncodingException;
import java.text.Normalizer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.persistence.Tuple;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.jpa.dao.data.*;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.jpa.util.DeleteConflict;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
import ca.uhn.fhir.jpa.util.ExpungeOutcome;
import ca.uhn.fhir.jpa.util.JpaConstants;
import ca.uhn.fhir.model.api.*;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.base.composite.BaseResourceReferenceDt;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerOperationInterceptor;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.*;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Sets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
@@ -47,17 +48,7 @@ import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.hibernate.Session;
import org.hibernate.internal.SessionImpl;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseReference;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IDomainResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.r4.model.BaseResource;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
import org.hl7.fhir.r4.model.CanonicalType;
@@ -74,12 +65,23 @@ import org.springframework.stereotype.Repository;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Sets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import javax.annotation.PostConstruct;
import javax.persistence.*;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;
import java.io.CharArrayWriter;
import java.io.UnsupportedEncodingException;
import java.text.Normalizer;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.*;
/*
* #%L
@@ -90,9 +92,9 @@ import com.google.common.hash.Hashing;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -100,125 +102,6 @@ import com.google.common.hash.Hashing;
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeChildResourceDefinition;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedCompositeStringUniqueDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamCoordsDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamDateDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamNumberDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamQuantityDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.entity.BaseTag;
import ca.uhn.fhir.jpa.entity.ForcedId;
import ca.uhn.fhir.jpa.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTag;
import ca.uhn.fhir.jpa.entity.ResourceIndexedCompositeStringUnique;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamCoords;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamNumber;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.entity.ResourceLink;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude;
import ca.uhn.fhir.jpa.entity.SearchParam;
import ca.uhn.fhir.jpa.entity.SearchParamPresent;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.entity.SearchStatusEnum;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.entity.SubscriptionTable;
import ca.uhn.fhir.jpa.entity.TagDefinition;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
import ca.uhn.fhir.jpa.entity.TermConceptMap;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroup;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.entity.TermConceptProperty;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.jpa.util.DeleteConflict;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
import ca.uhn.fhir.jpa.util.ExpungeOutcome;
import ca.uhn.fhir.jpa.util.JpaConstants;
import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.Tag;
import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.base.composite.BaseResourceReferenceDt;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenAndListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriAndListParam;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerOperationInterceptor;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.util.XmlUtil;
@SuppressWarnings("WeakerAccess")
@Repository
@@ -345,6 +228,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
protected ExpungeOutcome doExpunge(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions) {
TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
if (!getConfig().isExpungeEnabled()) {
throw new MethodNotAllowedException("$expunge is not enabled on this server");
@@ -363,32 +247,39 @@
/*
* Delete historical versions of deleted resources
*/
Pageable page = new PageRequest(0, remainingCount.get());
Slice<Long> resourceIds;
if (theResourceId != null) {
resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
} else {
if (theResourceName != null) {
resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
Pageable page = PageRequest.of(0, remainingCount.get());
Slice<Long> resourceIds = txTemplate.execute(t -> {
if (theResourceId != null) {
return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
} else {
resourceIds = myResourceTableDao.findIdsOfDeletedResources(page);
if (theResourceName != null) {
return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
} else {
return myResourceTableDao.findIdsOfDeletedResources(page);
}
}
}
});
for (Long next : resourceIds) {
expungeHistoricalVersionsOfId(next, remainingCount);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
txTemplate.execute(t -> {
expungeHistoricalVersionsOfId(next, remainingCount);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
return null;
});
}
/*
* Delete current versions of deleted resources
*/
for (Long next : resourceIds) {
expungeCurrentVersionOfResource(next);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
txTemplate.execute(t -> {
expungeCurrentVersionOfResource(next);
if (remainingCount.get() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
return null;
});
}
}
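Note the pattern in the rewritten expunge loops above: doExpunge no longer performs all of its work in one long-lived transaction. Each page lookup and each per-resource expunge now runs through its own txTemplate.execute(...) call (and, as shown later in this diff, the public expunge entry points gain @Transactional(propagation = Propagation.NEVER)), so a large $expunge becomes many short transactions. The Pageable is also now built with PageRequest.of, the Spring Data 2 factory method that replaces the deprecated new PageRequest(...) constructor. A minimal, self-contained sketch of the per-step transaction pattern, assuming an injected PlatformTransactionManager; processOne is a hypothetical stand-in for expungeHistoricalVersionsOfId:

import java.util.List;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

class PerStepTxSketch {
	private final TransactionTemplate myTxTemplate;

	PerStepTxSketch(PlatformTransactionManager theTxManager) {
		myTxTemplate = new TransactionTemplate(theTxManager);
	}

	void processAll(List<Long> theIds) {
		for (Long next : theIds) {
			// Each iteration commits (or rolls back) independently, so failure
			// impact and lock scope stay bounded to a single resource
			myTxTemplate.execute(t -> {
				processOne(next);
				return null;
			});
		}
	}

	private void processOne(Long theId) {
		// expunge work for a single resource id
	}
}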
@@ -398,22 +289,26 @@
/*
* Delete historical versions of non-deleted resources
*/
Pageable page = new PageRequest(0, remainingCount.get());
Slice<Long> historicalIds;
if (theResourceId != null && theVersion != null) {
historicalIds = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
} else {
if (theResourceName != null) {
historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
Pageable page = PageRequest.of(0, remainingCount.get());
Slice<Long> historicalIds = txTemplate.execute(t -> {
if (theResourceId != null && theVersion != null) {
return toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
} else {
historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
if (theResourceName != null) {
return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
} else {
return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
}
}
}
});
for (Long next : historicalIds) {
expungeHistoricalVersion(next);
if (remainingCount.decrementAndGet() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
txTemplate.execute(t -> {
expungeHistoricalVersion(next);
if (remainingCount.decrementAndGet() <= 0) {
return toExpungeOutcome(theExpungeOptions, remainingCount);
}
return null;
});
}
}
@@ -433,7 +328,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
});
txTemplate.execute(t -> {
doExpungeEverythingQuery("DELETE from " + SearchParamPresent.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + SearchParam.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ForcedId.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ResourceIndexedSearchParamDate.class.getSimpleName() + " d");
doExpungeEverythingQuery("DELETE from " + ResourceIndexedSearchParamNumber.class.getSimpleName() + " d");
@@ -822,58 +716,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return retVal;
}
@SuppressWarnings("unchecked")
public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = getDaos();
IFhirResourceDao<R> dao = (IFhirResourceDao<R>) resourceTypeToDao.get(theType);
return dao;
}
protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
if (retVal == null) {
List<String> supportedResourceTypes = getDaos()
.keySet()
.stream()
.map(t->myContext.getResourceDefinition(t).getName())
.sorted()
.collect(Collectors.toList());
throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
}
return retVal;
}
private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
if (myResourceTypeToDao == null) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
Map<String, IFhirResourceDao> daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
for (IFhirResourceDao<?> next : daos.values()) {
resourceTypeToDao.put(next.getResourceType(), next);
}
if (this instanceof IFhirResourceDao<?>) {
IFhirResourceDao<?> thiz = (IFhirResourceDao<?>) this;
resourceTypeToDao.put(thiz.getResourceType(), thiz);
}
myResourceTypeToDao = resourceTypeToDao;
}
return Collections.unmodifiableMap(myResourceTypeToDao);
}
@PostConstruct
public void startClearCaches() {
myResourceTypeToDao = null;
}
protected Set<ResourceIndexedSearchParamCoords> extractSearchParamCoords(ResourceTable theEntity, IBaseResource theResource) {
return mySearchParamExtractor.extractSearchParamCoords(theEntity, theResource);
}
@@ -1028,7 +870,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
param = new ResourceIndexedSearchParamQuantity();
break;
case STRING:
param = new ResourceIndexedSearchParamString();
param = new ResourceIndexedSearchParamString()
.setDaoConfig(myConfig);
break;
case TOKEN:
param = new ResourceIndexedSearchParamToken();
@@ -1075,18 +918,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return myConfig;
}
@Override
public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
/*
* We do a null check here because Smile's module system tries to
* initialize the application context twice if two modules depend on
* the persistence module. The second time sets the dependency's appctx.
*/
if (myApplicationContext == null) {
myApplicationContext = theApplicationContext;
}
}
public void setConfig(DaoConfig theConfig) {
myConfig = theConfig;
}
@@ -1113,6 +944,50 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
@SuppressWarnings("unchecked")
public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = getDaos();
IFhirResourceDao<R> dao = (IFhirResourceDao<R>) resourceTypeToDao.get(theType);
return dao;
}
protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
if (retVal == null) {
List<String> supportedResourceTypes = getDaos()
.keySet()
.stream()
.map(t -> myContext.getResourceDefinition(t).getName())
.sorted()
.collect(Collectors.toList());
throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
}
return retVal;
}
private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
if (myResourceTypeToDao == null) {
Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
Map<String, IFhirResourceDao> daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
for (IFhirResourceDao<?> next : daos.values()) {
resourceTypeToDao.put(next.getResourceType(), next);
}
if (this instanceof IFhirResourceDao<?>) {
IFhirResourceDao<?> thiz = (IFhirResourceDao<?>) this;
resourceTypeToDao.put(thiz.getResourceType(), thiz);
}
myResourceTypeToDao = resourceTypeToDao;
}
return Collections.unmodifiableMap(myResourceTypeToDao);
}
public IResourceIndexedCompositeStringUniqueDao getResourceIndexedCompositeStringUniqueDao() {
return myResourceIndexedCompositeStringUniqueDao;
}
@@ -1290,9 +1165,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
@Override
public SearchBuilder newSearchBuilder() {
SearchBuilder builder = new SearchBuilder(getContext(), myEntityManager, myFulltextSearchSvc, this, myResourceIndexedSearchParamUriDao,
myForcedIdDao,
myTerminologySvc, mySerarchParamRegistry, myResourceTagDao, myResourceViewDao);
SearchBuilder builder = new SearchBuilder(
getContext(), myEntityManager, myFulltextSearchSvc, this, myResourceIndexedSearchParamUriDao,
myForcedIdDao, myTerminologySvc, mySerarchParamRegistry, myResourceTagDao, myResourceViewDao);
return builder;
}
@@ -1608,6 +1483,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
// nothing
}
/**
* Subclasses may override to provide behaviour. Called when a pre-existing resource has been deleted from the database
*
* @param theEntity The resource
*/
protected void postDelete(ResourceTable theEntity) {
// nothing
}
/**
* Subclasses may override to provide behaviour. Called when a pre-existing resource has been updated in the database
*
@@ -1654,6 +1538,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return retVal;
}
@Override
public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
/*
* We do a null check here because Smile's module system tries to
* initialize the application context twice if two modules depend on
* the persistence module. The second time sets the dependency's appctx.
*/
if (myApplicationContext == null) {
myApplicationContext = theApplicationContext;
}
}
private void setUpdatedTime(Collection<? extends BaseResourceIndexedSearchParam> theParams, Date theUpdateTime) {
for (BaseResourceIndexedSearchParam nextSearchParam : theParams) {
nextSearchParam.setUpdated(theUpdateTime);
@@ -1710,6 +1606,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return false;
}
@PostConstruct
public void startClearCaches() {
myResourceTypeToDao = null;
}
private ExpungeOutcome toExpungeOutcome(ExpungeOptions theExpungeOptions, AtomicInteger theRemainingCount) {
return new ExpungeOutcome()
.setDeletedCount(theExpungeOptions.getLimit() - theRemainingCount.get());
@@ -1866,7 +1767,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
return theResourceType + '/' + theId.toString();
}
}
@SuppressWarnings("unchecked")
protected ResourceTable updateEntity(RequestDetails theRequest, final IBaseResource theResource, ResourceTable
theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
@@ -2139,6 +2040,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
postPersist(theEntity, (T) theResource);
} else if (theEntity.getDeleted() != null) {
postDelete(theEntity);
} else {
theEntity = myEntityManager.merge(theEntity);
@@ -2193,6 +2098,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
if (thePerformIndexing) {
for (ResourceIndexedSearchParamString next : removeCommon(existingStringParams, stringParams)) {
next.setDaoConfig(myConfig);
myEntityManager.remove(next);
theEntity.getParamsString().remove(next);
}

View File

@@ -50,6 +50,7 @@ import ca.uhn.fhir.rest.server.method.SearchMethodBinding;
import ca.uhn.fhir.util.*;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.r4.model.InstantType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.lang.NonNull;
@@ -517,6 +518,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
@Override
@Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions) {
BaseHasResource entity = readEntity(theId);
if (theId.hasVersionIdPart()) {
@@ -532,6 +534,7 @@
}
@Override
@Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions) {
ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName());
@@ -856,14 +859,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
T retVal = toResource(myResourceType, entity, null, false);
IPrimitiveType<Date> deleted;
if (retVal instanceof IResource) {
deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) retVal);
} else {
deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) retVal);
}
if (deleted != null && !deleted.isEmpty()) {
throw new ResourceGoneException("Resource was deleted at " + deleted.getValueAsString());
if (entity.getDeleted() != null) {
throw new ResourceGoneException("Resource was deleted at " + new InstantType(entity.getDeleted()).getValueAsString());
}
ourLog.debug("Processed read on {} in {}ms", theId.getValue(), w.getMillisAndRestart());

View File

@@ -305,7 +305,8 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase
final IBaseResource resource = toResource(resourceTable, false);
@SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resource.getClass());
Class<? extends IBaseResource> resourceClass = getContext().getResourceDefinition(resourceTable.getResourceType()).getImplementingClass();
@SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resourceClass);
dao.reindex(resource, resourceTable);
return null;

View File

@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -20,45 +20,43 @@ package ca.uhn.fhir.jpa.dao;
* #L%
*/
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.util.FhirTerser;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.ObjectUtils;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import com.google.common.annotations.VisibleForTesting;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.util.FhirTerser;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
public abstract class BaseSearchParamExtractor implements ISearchParamExtractor {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
public static final Pattern SPLIT = Pattern.compile("\\||( or )");
public static final Pattern SPLIT = Pattern.compile("\\||( or )");
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
@Autowired
private FhirContext myContext;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
public BaseSearchParamExtractor() {
super();
}
public BaseSearchParamExtractor(FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
public BaseSearchParamExtractor(DaoConfig theDaoConfig, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
myContext = theCtx;
mySearchParamRegistry = theSearchParamRegistry;
myDaoConfig = theDaoConfig;
}
@Override
public List<PathAndRef> extractResourceLinks(IBaseResource theResource, RuntimeSearchParam theNextSpDef) {
List<PathAndRef> refs = new ArrayList<PathAndRef>();
@@ -95,20 +93,24 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
}
} catch (Exception e) {
RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
ourLog.warn("Failed to index values from path[{}] in resource type[{}]: {}", new Object[] { nextPathTrimmed, def.getName(), e.toString(), e } );
ourLog.warn("Failed to index values from path[{}] in resource type[{}]: {}", new Object[] {nextPathTrimmed, def.getName(), e.toString(), e});
}
}
return values;
}
protected FhirContext getContext() {
return myContext;
}
public DaoConfig getDaoConfig() {
return myDaoConfig;
}
public Collection<RuntimeSearchParam> getSearchParams(IBaseResource theResource) {
RuntimeResourceDefinition def = getContext().getResourceDefinition(theResource);
Collection<RuntimeSearchParam> retVal = mySearchParamRegistry.getActiveSearchParams(def.getName()).values();
List<RuntimeSearchParam> defaultList= Collections.emptyList();
List<RuntimeSearchParam> defaultList = Collections.emptyList();
retVal = ObjectUtils.defaultIfNull(retVal, defaultList);
return retVal;
}

View File

@@ -20,9 +20,9 @@ import java.util.*;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -89,7 +89,7 @@ public class DaoConfig {
/**
* update setter javadoc if default changes
*/
private boolean myAllowContainsSearches = true;
private boolean myAllowContainsSearches = false;
/**
* update setter javadoc if default changes
@@ -754,7 +754,15 @@
* If enabled, the server will support the use of :contains searches,
* which are helpful but can have adverse effects on performance.
* <p>
* Default is <code>true</code>
* Default is <code>false</code> (Note that prior to HAPI FHIR
* 3.5.0 the default was <code>true</code>)
* </p>
* <p>
* Note: If you change this value after data has already been
* stored in the database, you must force a reindexing of all
* data in the database or resources may not be searchable.
* </p>
*/
public boolean isAllowContainsSearches() {
return myAllowContainsSearches;
@@ -764,12 +772,21 @@
* If enabled, the server will support the use of :contains searches,
* which are helpful but can have adverse effects on performance.
* <p>
* Default is <code>true</code>
* Default is <code>false</code> (Note that prior to HAPI FHIR
* 3.5.0 the default was <code>true</code>)
* </p>
* <p>
* Note: If you change this value after data has already been
* stored in the database, you must force a reindexing of all
* data in the database or resources may not be searchable.
* </p>
*/
public void setAllowContainsSearches(boolean theAllowContainsSearches) {
this.myAllowContainsSearches = theAllowContainsSearches;
}
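With the default now false, deployments that rely on :contains string searches must opt in explicitly. A minimal sketch (buildConfig is an illustrative name; setAllowContainsSearches is the real setter shown above):

import ca.uhn.fhir.jpa.dao.DaoConfig;

class ContainsConfigSketch {
	DaoConfig buildConfig() {
		DaoConfig config = new DaoConfig();
		// Re-enable :contains searches (default is now false). Per the
		// javadoc above, flipping this on an existing database requires
		// a full reindex before :contains queries behave correctly.
		config.setAllowContainsSearches(true);
		return config;
	}
}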
/**
* If set to <code>true</code> (default is <code>false</code>) the server will allow
* resources to have references to external servers. For example if this server is

View File

@@ -28,6 +28,7 @@ import java.util.*;
import javax.annotation.PostConstruct;
import org.apache.commons.codec.binary.StringUtils;
import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -62,7 +63,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
@Qualifier("myFhirContextDstu2Hl7Org")
private FhirContext myRiCtx;
private ValidationSupportChain myValidationSupport;
private CachingValidationSupport myValidationSupport;
private void addCompose(String theFilter, ValueSet theValueSetToPopulate, ValueSet theSourceValueSet, CodeSystemConcept theConcept) {
if (isBlank(theFilter)) {
@@ -252,7 +253,7 @@
public void postConstruct() {
super.postConstruct();
myDefaultProfileValidationSupport = new DefaultProfileValidationSupport();
myValidationSupport = new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport);
myValidationSupport = new CachingValidationSupport(new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport));
}
@Override

View File

@@ -1,58 +1,5 @@
package ca.uhn.fhir.jpa.dao;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.AbstractQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaBuilder.In;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Expression;
import javax.persistence.criteria.From;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType;
import javax.persistence.criteria.Order;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.persistence.criteria.Subquery;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.tuple.Pair;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.query.Query;
import org.hl7.fhir.dstu3.model.BaseResource;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/*
* #%L
* HAPI FHIR JPA Server
@@ -72,48 +19,19 @@ import com.google.common.collect.Sets;
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeDeclaredChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeChildChoiceDefinition;
import ca.uhn.fhir.context.RuntimeChildResourceDefinition;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.entity.ForcedId;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamNumber;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.entity.ResourceLink;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.entity.SearchParam;
import ca.uhn.fhir.jpa.entity.SearchParamPresent;
import ca.uhn.fhir.jpa.entity.TagDefinition;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.jpa.term.VersionIndependentConcept;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.*;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.base.composite.BaseIdentifierDt;
import ca.uhn.fhir.model.base.composite.BaseQuantityDt;
@@ -125,25 +43,41 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.param.CompositeParam;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.param.UriParamQualifierEnum;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.tuple.Pair;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.query.Query;
import org.hibernate.query.criteria.internal.CriteriaBuilderImpl;
import org.hibernate.query.criteria.internal.predicate.BooleanStaticAssertionPredicate;
import org.hl7.fhir.dstu3.model.BaseResource;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.*;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.*;
import java.util.Map.Entry;
import static org.apache.commons.lang3.StringUtils.*;
/**
* The SearchBuilder is responsible for actually forming the SQL query that handles
@@ -332,7 +266,7 @@ public class SearchBuilder implements ISearchBuilder {
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
IQueryParameterType params = nextOr;
@@ -348,8 +282,9 @@
ParamPrefixEnum prefix = ObjectUtils.defaultIfNull(param.getPrefix(), ParamPrefixEnum.EQUAL);
String invalidMessageName = "invalidNumberPrefix";
Predicate num = createPredicateNumeric(theResourceName, theParamName, join, myBuilder, params, prefix, value, fromObj, invalidMessageName);
codePredicates.add(num);
Predicate predicateNumeric = createPredicateNumeric(theResourceName, theParamName, join, myBuilder, params, prefix, value, fromObj, invalidMessageName);
Predicate predicateOuter = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, predicateNumeric);
codePredicates.add(predicateOuter);
} else {
throw new IllegalArgumentException("Invalid token type: " + params.getClass());
@@ -362,11 +297,10 @@
private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing) {
Join<ResourceTable, SearchParamPresent> paramPresentJoin = myResourceTableRoot.join("mySearchParamPresents", JoinType.LEFT);
Join<SearchParamPresent, SearchParam> paramJoin = paramPresentJoin.join("mySearchParam", JoinType.LEFT);
myPredicates.add(myBuilder.equal(paramJoin.get("myResourceName"), theResourceName));
myPredicates.add(myBuilder.equal(paramJoin.get("myParamName"), theParamName));
myPredicates.add(myBuilder.equal(paramPresentJoin.get("myPresent"), !theMissing));
Expression<Long> hashPresence = paramPresentJoin.get("myHashPresence").as(Long.class);
Long hash = SearchParamPresent.calculateHashPresence(theResourceName, theParamName, !theMissing);
myPredicates.add(myBuilder.equal(hashPresence, hash));
}
private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing, Join<ResourceTable, ? extends BaseResourceIndexedSearchParam> theJoin) {
@@ -384,7 +318,7 @@
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
Predicate singleCode = createPredicateQuantity(nextOr, theResourceName, theParamName, myBuilder, join);
@@ -407,7 +341,7 @@
Join<ResourceTable, ResourceLink> join = createOrReuseJoin(JoinEnum.REFERENCE, theParamName);
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
@@ -504,7 +438,7 @@
} else {
RuntimeResourceDefinition resDef = myContext.getResourceDefinition(ref.getResourceType());
resourceTypes = new ArrayList<Class<? extends IBaseResource>>(1);
resourceTypes = new ArrayList<>(1);
resourceTypes.add(resDef.getImplementingClass());
resourceId = ref.getIdPart();
}
@@ -549,7 +483,7 @@
IQueryParameterType chainValue;
if (remainingChain != null) {
if (param == null || param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", new Object[] {nextType.getSimpleName(), chain, remainingChain});
ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", nextType.getSimpleName(), chain, remainingChain);
continue;
}
@@ -570,7 +504,7 @@
Root<ResourceTable> subQfrom = subQ.from(ResourceTable.class);
subQ.select(subQfrom.get("myId").as(Long.class));
List<List<? extends IQueryParameterType>> andOrParams = new ArrayList<List<? extends IQueryParameterType>>();
List<List<? extends IQueryParameterType>> andOrParams = new ArrayList<>();
andOrParams.add(Collections.singletonList(chainValue));
/*
@@ -621,7 +555,7 @@
private void addPredicateResourceId(List<List<? extends IQueryParameterType>> theValues) {
for (List<? extends IQueryParameterType> nextValue : theValues) {
Set<Long> orPids = new HashSet<Long>();
Set<Long> orPids = new HashSet<>();
for (IQueryParameterType next : nextValue) {
String value = next.getValueAsQueryToken(myContext);
if (value != null && value.startsWith("|")) {
@@ -669,10 +603,9 @@
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
IQueryParameterType theParameter = nextOr;
Predicate singleCode = createPredicateString(theParameter, theResourceName, theParamName, myBuilder, join);
Predicate singleCode = createPredicateString(nextOr, theResourceName, theParamName, myBuilder, join);
codePredicates.add(singleCode);
}
@@ -817,7 +750,7 @@
return;
}
List<Predicate> codePredicates = new ArrayList<Predicate>();
List<Predicate> codePredicates = new ArrayList<>();
for (IQueryParameterType nextOr : theList) {
if (nextOr instanceof TokenParam) {
@@ -860,7 +793,6 @@
continue;
}
Predicate predicate;
if (param.getQualifier() == UriParamQualifierEnum.ABOVE) {
/*
@@ -889,14 +821,24 @@
continue;
}
predicate = join.get("myUri").as(String.class).in(toFind);
Predicate uriPredicate = join.get("myUri").as(String.class).in(toFind);
Predicate hashAndUriPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, uriPredicate);
codePredicates.add(hashAndUriPredicate);
} else if (param.getQualifier() == UriParamQualifierEnum.BELOW) {
predicate = myBuilder.like(join.get("myUri").as(String.class), createLeftMatchLikeExpression(value));
Predicate uriPredicate = myBuilder.like(join.get("myUri").as(String.class), createLeftMatchLikeExpression(value));
Predicate hashAndUriPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, uriPredicate);
codePredicates.add(hashAndUriPredicate);
} else {
predicate = myBuilder.equal(join.get("myUri").as(String.class), value);
long hashUri = ResourceIndexedSearchParamUri.calculateHashUri(theResourceName, theParamName, value);
Predicate hashPredicate = myBuilder.equal(join.get("myHashUri"), hashUri);
codePredicates.add(hashPredicate);
}
codePredicates.add(predicate);
} else {
throw new IllegalArgumentException("Invalid URI type: " + nextOr.getClass());
}
@@ -914,16 +856,13 @@
}
Predicate orPredicate = myBuilder.or(toArray(codePredicates));
Predicate outerPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, orPredicate);
myPredicates.add(outerPredicate);
myPredicates.add(orPredicate);
}
private Predicate combineParamIndexPredicateWithParamNamePredicate(String theResourceName, String theParamName, From<?, ? extends BaseResourceIndexedSearchParam> theFrom, Predicate thePredicate) {
Predicate resourceTypePredicate = myBuilder.equal(theFrom.get("myResourceType"), theResourceName);
Predicate paramNamePredicate = myBuilder.equal(theFrom.get("myParamName"), theParamName);
Predicate outerPredicate = myBuilder.and(resourceTypePredicate, paramNamePredicate, thePredicate);
return outerPredicate;
long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
Predicate hashIdentityPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity);
return myBuilder.and(hashIdentityPredicate, thePredicate);
}
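The rewritten combineParamIndexPredicateWithParamNamePredicate above captures the central idea of this branch: instead of AND-ing two string equality predicates on myResourceType and myParamName, the query compares a single precomputed myHashIdentity column, which is much cheaper to index. The same technique drives calculateHashPresence, calculateHashUri, and the quantity hashes used elsewhere in this file. Below is a sketch of how such an identity hash could be computed with murmur3 from Guava, which this commit imports in BaseHapiFhirDao; the seed and delimiter here are assumptions for illustration, not necessarily what HAPI's real calculateHashIdentity uses:

import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

class HashIdentitySketch {
	private static final HashFunction HASH = Hashing.murmur3_128(0);

	// Fold resource type and param name into a single long that can live
	// in an indexed column and be matched with one '=' comparison
	static long calculateHashIdentity(String theResourceType, String theParamName) {
		return HASH.newHasher()
			.putString(theResourceType, StandardCharsets.UTF_8)
			.putChar('|') // delimiter so ("ab","c") hashes differently from ("a","bc")
			.putString(theParamName, StandardCharsets.UTF_8)
			.hash()
			.asLong();
	}
}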
private Predicate createCompositeParamPart(String theResourceName, Root<ResourceTable> theRoot, RuntimeSearchParam theParam, IQueryParameterType leftValue) {
@@ -1103,7 +1042,7 @@
if (theParamName == null) {
return num;
}
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, num);
return num;
}
private Predicate createPredicateQuantity(IQueryParameterType theParam, String theResourceName, String theParamName, CriteriaBuilder theBuilder,
@ -1129,39 +1068,31 @@ public class SearchBuilder implements ISearchBuilder {
throw new IllegalArgumentException("Invalid quantity type: " + theParam.getClass());
}
Predicate system = null;
if (!isBlank(systemValue)) {
system = theBuilder.equal(theFrom.get("mySystem"), systemValue);
}
Predicate code = null;
if (!isBlank(unitsValue)) {
code = theBuilder.equal(theFrom.get("myUnits"), unitsValue);
Predicate hashPredicate;
if (!isBlank(systemValue) && !isBlank(unitsValue)) {
long hash = ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(theResourceName, theParamName, systemValue, unitsValue);
hashPredicate = myBuilder.equal(theFrom.get("myHashIdentitySystemAndUnits"), hash);
} else if (!isBlank(unitsValue)) {
long hash = ResourceIndexedSearchParamQuantity.calculateHashUnits(theResourceName, theParamName, unitsValue);
hashPredicate = myBuilder.equal(theFrom.get("myHashIdentityAndUnits"), hash);
} else {
long hash = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
hashPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hash);
}
cmpValue = ObjectUtils.defaultIfNull(cmpValue, ParamPrefixEnum.EQUAL);
final Expression<BigDecimal> path = theFrom.get("myValue");
String invalidMessageName = "invalidQuantityPrefix";
Predicate num = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
Predicate numericPredicate = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
Predicate singleCode;
if (system == null && code == null) {
singleCode = num;
} else if (system == null) {
singleCode = theBuilder.and(code, num);
} else if (code == null) {
singleCode = theBuilder.and(system, num);
} else {
singleCode = theBuilder.and(system, code, num);
}
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
return theBuilder.and(hashPredicate, numericPredicate);
}
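The branch above maps the three shapes of a quantity search onto the three new hash columns: system and units present, units only, or neither. A small worked example using the static helpers added in this commit; the resource, parameter, and unit values are illustrative:
import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamQuantity;
public class QuantityHashExample {
	public static void main(String[] args) {
		// system + units supplied: match on HASH_IDENTITY_SYS_UNITS
		long sysAndUnits = ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(
			"Observation", "value-quantity", "http://unitsofmeasure.org", "mg");
		// units only: match on HASH_IDENTITY_AND_UNITS
		long unitsOnly = ResourceIndexedSearchParamQuantity.calculateHashUnits(
			"Observation", "value-quantity", "mg");
		// neither: fall back to HASH_IDENTITY plus the numeric value predicate
		long identityOnly = BaseResourceIndexedSearchParam.calculateHashIdentity(
			"Observation", "value-quantity");
		System.out.printf("%d %d %d%n", sysAndUnits, unitsOnly, identityOnly);
	}
}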
private Predicate createPredicateString(IQueryParameterType theParameter, String theResourceName, String theParamName, CriteriaBuilder theBuilder,
From<?, ResourceIndexedSearchParamString> theFrom) {
String rawSearchTerm;
DaoConfig daoConfig = myCallingDao.getConfig();
if (theParameter instanceof TokenParam) {
TokenParam id = (TokenParam) theParameter;
if (!id.isText()) {
@ -1172,7 +1103,7 @@ public class SearchBuilder implements ISearchBuilder {
StringParam id = (StringParam) theParameter;
rawSearchTerm = id.getValue();
if (id.isContains()) {
if (!myCallingDao.getConfig().isAllowContainsSearches()) {
if (!daoConfig.isAllowContainsSearches()) {
throw new MethodNotAllowedException(":contains modifier is disabled on this server");
}
}
@ -1188,22 +1119,34 @@ public class SearchBuilder implements ISearchBuilder {
+ ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm);
}
String likeExpression = BaseHapiFhirDao.normalizeString(rawSearchTerm);
if (theParameter instanceof StringParam &&
((StringParam) theParameter).isContains() &&
myCallingDao.getConfig().isAllowContainsSearches()) {
likeExpression = createLeftAndRightMatchLikeExpression(likeExpression);
boolean exactMatch = theParameter instanceof StringParam && ((StringParam) theParameter).isExact();
if (exactMatch) {
// Exact match
Long hash = ResourceIndexedSearchParamString.calculateHashExact(theResourceName, theParamName, rawSearchTerm);
return theBuilder.equal(theFrom.get("myHashExact").as(Long.class), hash);
} else {
likeExpression = createLeftMatchLikeExpression(likeExpression);
}
Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
if (theParameter instanceof StringParam && ((StringParam) theParameter).isExact()) {
Predicate exactCode = theBuilder.equal(theFrom.get("myValueExact"), rawSearchTerm);
singleCode = theBuilder.and(singleCode, exactCode);
}
// Normalized Match
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
String normalizedString = BaseHapiFhirDao.normalizeString(rawSearchTerm);
String likeExpression;
if (theParameter instanceof StringParam &&
((StringParam) theParameter).isContains() &&
daoConfig.isAllowContainsSearches()) {
likeExpression = createLeftAndRightMatchLikeExpression(normalizedString);
} else {
likeExpression = createLeftMatchLikeExpression(normalizedString);
}
Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(daoConfig, theResourceName, theParamName, normalizedString);
Predicate hashCode = theBuilder.equal(theFrom.get("myHashNormalizedPrefix").as(Long.class), hash);
Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
return theBuilder.and(hashCode, singleCode);
}
}
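The net effect of the rewritten string predicate: an :exact search becomes a single equality on HASH_EXACT, while the default prefix search pairs an equality on HASH_NORM_PREFIX with the existing LIKE on the normalized column. A sketch of the two hash inputs; it assumes a default-constructed DaoConfig and passes an already-normalized value, since the real caller normalizes via BaseHapiFhirDao.normalizeString first:
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
public class StringHashExample {
	public static void main(String[] args) {
		DaoConfig config = new DaoConfig();
		// :exact search -> equality on HASH_EXACT, raw (un-normalized) value
		long exact = ResourceIndexedSearchParamString.calculateHashExact(
			"Patient", "family", "Smith");
		// default search -> equality on HASH_NORM_PREFIX plus LIKE 'SMITH%'
		long normalized = ResourceIndexedSearchParamString.calculateHashNormalized(
			config, "Patient", "family", "SMITH");
		System.out.println(exact + " " + normalized);
	}
}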
private List<Predicate> createPredicateTagList(Path<TagDefinition> theDefJoin, CriteriaBuilder theBuilder, TagTypeEnum theTagType, List<Pair<String, String>> theTokens) {
@ -1258,7 +1201,7 @@ public class SearchBuilder implements ISearchBuilder {
* Process token modifiers (:in, :below, :above)
*/
List<VersionIndependentConcept> codes = null;
List<VersionIndependentConcept> codes;
if (modifier == TokenParamModifier.IN) {
codes = myTerminologySvc.expandValueSet(code);
} else if (modifier == TokenParamModifier.ABOVE) {
@ -1267,81 +1210,53 @@ public class SearchBuilder implements ISearchBuilder {
} else if (modifier == TokenParamModifier.BELOW) {
system = determineSystemIfMissing(theParamName, code, system);
codes = myTerminologySvc.findCodesBelow(system, code);
}
ArrayList<Predicate> singleCodePredicates = new ArrayList<>();
if (codes != null) {
if (codes.isEmpty()) {
// This will never match anything
Predicate codePredicate = theBuilder.isNull(theFrom.get("myMissing"));
singleCodePredicates.add(codePredicate);
} else {
List<Predicate> orPredicates = new ArrayList<Predicate>();
Map<String, List<VersionIndependentConcept>> map = new HashMap<String, List<VersionIndependentConcept>>();
for (VersionIndependentConcept nextCode : codes) {
List<VersionIndependentConcept> systemCodes = map.get(nextCode.getSystem());
if (null == systemCodes) {
systemCodes = new ArrayList<>();
map.put(nextCode.getSystem(), systemCodes);
}
systemCodes.add(nextCode);
}
// Use "in" in case of large numbers of codes due to param modifiers
final Path<String> systemExpression = theFrom.get("mySystem");
final Path<String> valueExpression = theFrom.get("myValue");
for (Map.Entry<String, List<VersionIndependentConcept>> entry : map.entrySet()) {
Predicate systemPredicate = theBuilder.equal(systemExpression, entry.getKey());
In<String> codePredicate = theBuilder.in(valueExpression);
for (VersionIndependentConcept nextCode : entry.getValue()) {
codePredicate.value(nextCode.getCode());
}
orPredicates.add(theBuilder.and(systemPredicate, codePredicate));
}
singleCodePredicates.add(theBuilder.or(orPredicates.toArray(new Predicate[orPredicates.size()])));
}
} else {
codes = Collections.singletonList(new VersionIndependentConcept(system, code));
}
/*
* Ok, this is a normal query
*/
if (codes.isEmpty()) {
// This will never match anything
return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, false);
}
if (StringUtils.isNotBlank(system)) {
if (modifier != null && modifier == TokenParamModifier.NOT) {
singleCodePredicates.add(theBuilder.notEqual(theFrom.get("mySystem"), system));
} else {
singleCodePredicates.add(theBuilder.equal(theFrom.get("mySystem"), system));
}
} else if (system == null) {
// don't check the system
/*
* Note: A null system value means "match any system", but
* an empty-string system value means "match values that
* explicitly have no system".
*/
boolean haveSystem = codes.get(0).getSystem() != null;
boolean haveCode = isNotBlank(codes.get(0).getCode());
Expression<Long> hashField;
if (!haveSystem && !haveCode) {
// If we have neither, this isn't actually an expression so
// just return 1=1
return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, true);
} else if (haveSystem && haveCode) {
hashField = theFrom.get("myHashSystemAndValue").as(Long.class);
} else if (haveSystem) {
hashField = theFrom.get("myHashSystem").as(Long.class);
} else {
hashField = theFrom.get("myHashValue").as(Long.class);
}
List<Long> values = new ArrayList<>(codes.size());
for (VersionIndependentConcept next : codes) {
if (haveSystem && haveCode) {
values.add(ResourceIndexedSearchParamToken.calculateHashSystemAndValue(theResourceName, theParamName, next.getSystem(), next.getCode()));
} else if (haveSystem) {
values.add(ResourceIndexedSearchParamToken.calculateHashSystem(theResourceName, theParamName, next.getSystem()));
} else {
// If the system is "", we only match on null systems
singleCodePredicates.add(theBuilder.isNull(theFrom.get("mySystem")));
}
if (StringUtils.isNotBlank(code)) {
if (modifier != null && modifier == TokenParamModifier.NOT) {
singleCodePredicates.add(theBuilder.notEqual(theFrom.get("myValue"), code));
} else {
singleCodePredicates.add(theBuilder.equal(theFrom.get("myValue"), code));
}
} else {
/*
* As of HAPI FHIR 1.5, if the client searched for a token with a system but no specified value this means to
* match all tokens with the given value.
*
* I'm not sure I agree with this, but hey.. FHIR-I voted and this was the result :)
*/
// singleCodePredicates.add(theBuilder.isNull(theFrom.get("myValue")));
values.add(ResourceIndexedSearchParamToken.calculateHashValue(theResourceName, theParamName, next.getCode()));
}
}
Predicate singleCode = theBuilder.and(toArray(singleCodePredicates));
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
Predicate predicate = hashField.in(values);
if (modifier == TokenParamModifier.NOT) {
Predicate identityPredicate = theBuilder.equal(theFrom.get("myHashIdentity").as(Long.class), BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName));
Predicate disjunctionPredicate = theBuilder.not(predicate);
predicate = theBuilder.and(identityPredicate, disjunctionPredicate);
}
return predicate;
}
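Which of the three token hash columns gets queried follows directly from what the client supplied, mirroring FHIR's system|code, system|, and bare code forms; for :not, the negated IN is ANDed with a HASH_IDENTITY equality so the complement is taken only within rows of the same parameter rather than across the whole table. Illustrative values below; the helpers are the ones referenced above:
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
public class TokenHashExample {
	public static void main(String[] args) {
		// identifier=http://acme.org/mrn|12345 -> HASH_SYS_AND_VALUE
		long sysAndValue = ResourceIndexedSearchParamToken.calculateHashSystemAndValue(
			"Patient", "identifier", "http://acme.org/mrn", "12345");
		// identifier=http://acme.org/mrn| -> HASH_SYS (any value in that system)
		long sysOnly = ResourceIndexedSearchParamToken.calculateHashSystem(
			"Patient", "identifier", "http://acme.org/mrn");
		// identifier=12345 -> HASH_VALUE (any system)
		long valueOnly = ResourceIndexedSearchParamToken.calculateHashValue(
			"Patient", "identifier", "12345");
		System.out.printf("%d %d %d%n", sysAndValue, sysOnly, valueOnly);
	}
}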
@Override
@ -1446,8 +1361,8 @@ public class SearchBuilder implements ISearchBuilder {
if (myParams.getEverythingMode() != null) {
Join<ResourceTable, ResourceLink> join = myResourceTableRoot.join("myResourceLinks", JoinType.LEFT);
if (myParams.get(BaseResource.SP_RES_ID) != null) {
StringParam idParm = (StringParam) myParams.get(BaseResource.SP_RES_ID).get(0).get(0);
if (myParams.get(IAnyResource.SP_RES_ID) != null) {
StringParam idParm = (StringParam) myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
Long pid = BaseHapiFhirDao.translateForcedIdToPid(myResourceName, idParm.getValue(), myForcedIdDao);
if (myAlsoIncludePids == null) {
myAlsoIncludePids = new ArrayList<>(1);
@ -1537,7 +1452,7 @@ public class SearchBuilder implements ISearchBuilder {
return false;
}
if (BaseResource.SP_RES_ID.equals(theSort.getParamName())) {
if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) {
From<?, ?> forcedIdJoin = theFrom.join("myForcedId", JoinType.LEFT);
if (theSort.getOrder() == null || theSort.getOrder() == SortOrderEnum.ASC) {
theOrders.add(theBuilder.asc(forcedIdJoin.get("myForcedId")));
@ -1730,7 +1645,7 @@ public class SearchBuilder implements ISearchBuilder {
idList.add(resource.getId());
}
Map<Long, Collection<ResourceTag>> tagMap = new HashMap<Long, Collection<ResourceTag>>();
Map<Long, Collection<ResourceTag>> tagMap = new HashMap<>();
//-- no tags
if (idList.size() == 0)
@ -1747,7 +1662,7 @@ public class SearchBuilder implements ISearchBuilder {
resourceId = tag.getResourceId();
tagCol = tagMap.get(resourceId);
if (tagCol == null) {
tagCol = new ArrayList<ResourceTag>();
tagCol = new ArrayList<>();
tagCol.add(tag);
tagMap.put(resourceId, tagCol);
} else {
@ -1794,18 +1709,16 @@ public class SearchBuilder implements ISearchBuilder {
}
/**
* THIS SHOULD RETURN HASHSET and not jsut Set because we add to it later (so it can't be Collections.emptySet())
*
* @param theLastUpdated
* THIS SHOULD RETURN HASHSET and not just Set because we add to it later (so it can't be Collections.emptySet())
*/
@Override
public HashSet<Long> loadReverseIncludes(IDao theCallingDao, FhirContext theContext, EntityManager theEntityManager, Collection<Long> theMatches, Set<Include> theRevIncludes,
boolean theReverseMode, DateRangeParam theLastUpdated) {
if (theMatches.size() == 0) {
return new HashSet<Long>();
return new HashSet<>();
}
if (theRevIncludes == null || theRevIncludes.isEmpty()) {
return new HashSet<Long>();
return new HashSet<>();
}
String searchFieldName = theReverseMode ? "myTargetResourcePid" : "mySourceResourcePid";
@ -1846,7 +1759,7 @@ public class SearchBuilder implements ISearchBuilder {
} else {
List<String> paths;
RuntimeSearchParam param = null;
RuntimeSearchParam param;
String resType = nextInclude.getParamType();
if (isBlank(resType)) {
continue;

View File

@ -59,7 +59,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -68,7 +68,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}

View File

@ -1,34 +0,0 @@
package ca.uhn.fhir.jpa.dao.data;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.SearchParam;
public interface ISearchParamDao extends JpaRepository<SearchParam, Long> {
@Query("SELECT s FROM SearchParam s WHERE s.myResourceName = :resname AND s.myParamName = :parmname")
public SearchParam findForResource(@Param("resname") String theResourceType, @Param("parmname") String theParamName);
}

View File

@ -26,6 +26,8 @@ import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import java.util.Optional;
public interface ITermCodeSystemDao extends JpaRepository<TermCodeSystem, Long> {
@Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myCodeSystemUri = :code_system_uri")
@ -34,4 +36,7 @@ public interface ITermCodeSystemDao extends JpaRepository<TermCodeSystem, Long>
@Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myResourcePid = :resource_pid")
TermCodeSystem findByResourcePid(@Param("resource_pid") Long theResourcePid);
@Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myCurrentVersion.myId = :csv_pid")
Optional<TermCodeSystem> findWithCodeSystemVersionAsCurrentVersion(@Param("csv_pid") Long theCodeSystemVersionPid);
}

View File

@ -1,9 +1,16 @@
package ca.uhn.fhir.jpa.dao.data;
import java.util.List;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
/*
* #%L
@ -14,9 +21,9 @@ import org.springframework.data.domain.Pageable;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -25,14 +32,6 @@ import org.springframework.data.domain.Pageable;
* #L%
*/
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
public interface ITermConceptDao extends JpaRepository<TermConcept, Long> {
@Query("SELECT c FROM TermConcept c WHERE c.myCodeSystem = :code_system AND c.myCode = :code")
@ -42,14 +41,13 @@ public interface ITermConceptDao extends JpaRepository<TermConcept, Long> {
List<TermConcept> findByCodeSystemVersion(@Param("code_system") TermCodeSystemVersion theCodeSystem);
@Query("SELECT t FROM TermConcept t WHERE t.myCodeSystem.myId = :cs_pid")
@Modifying
List<TermConcept> findByCodeSystemVersion(@Param("cs_pid") Long thePid);
Slice<TermConcept> findByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
@Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null")
Page<TermConcept> findResourcesRequiringReindexing(Pageable thePageRequest);
@Query("UPDATE TermConcept t SET t.myIndexStatus = null")
@Modifying
int markAllForReindexing();
@Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null")
Page<TermConcept> findResourcesRequiringReindexing(Pageable thePageRequest);
}
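Switching these finders from List to Slice lets callers walk a large code system in fixed-size pages instead of materializing every concept at once. A hedged usage sketch: the purger class, page size, and deleteAll call are assumptions rather than code from this commit, and PageRequest.of is the Spring Data 2.x form (older versions use new PageRequest(...)):
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.entity.TermConcept;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
public class TermConceptPurger {
	private final ITermConceptDao myConceptDao;
	public TermConceptPurger(ITermConceptDao theConceptDao) {
		myConceptDao = theConceptDao;
	}
	public void deleteConceptsInPages(Long theCodeSystemVersionPid) {
		Slice<TermConcept> slice;
		do {
			// Always request page 0: each pass deletes what it just read,
			// so the next chunk slides into the first page by itself.
			slice = myConceptDao.findByCodeSystemVersion(PageRequest.of(0, 1000), theCodeSystemVersionPid);
			myConceptDao.deleteAll(slice.getContent());
		} while (slice.hasNext());
	}
}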

View File

@ -1,7 +1,11 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
/*
* #%L
@ -12,9 +16,9 @@ import org.springframework.data.jpa.repository.JpaRepository;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -24,5 +28,8 @@ import org.springframework.data.jpa.repository.JpaRepository;
*/
public interface ITermConceptDesignationDao extends JpaRepository<TermConceptDesignation, Long> {
// nothing
@Query("SELECT t FROM TermConceptDesignation t WHERE t.myCodeSystemVersion.myId = :csv_pid")
Slice<TermConceptDesignation> findByCodeSystemVersion(Pageable thePage, @Param("csv_pid") Long thePid);
}

View File

@ -1,5 +1,12 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.Collection;
/*
@ -11,9 +18,9 @@ import java.util.Collection;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -22,20 +29,12 @@ import java.util.Collection;
* #L%
*/
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
public interface ITermConceptParentChildLinkDao extends JpaRepository<TermConceptParentChildLink, Long> {
@Query("DELETE FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
@Modifying
void deleteByCodeSystemVersion(@Param("cs_pid") Long thePid);
@Query("SELECT t.myParentPid FROM TermConceptParentChildLink t WHERE t.myChildPid = :child_pid")
Collection<Long> findAllWithChild(@Param("child_pid") Long theConceptPid);
@Query("SELECT t FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
Slice<TermConceptParentChildLink> findByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
}

View File

@ -1,17 +1,12 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptProperty;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
/*
* #%L
* HAPI FHIR JPA Server
@ -21,9 +16,9 @@ import java.util.List;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -33,5 +28,8 @@ import java.util.List;
*/
public interface ITermConceptPropertyDao extends JpaRepository<TermConceptProperty, Long> {
// nothing
@Query("SELECT t FROM TermConceptProperty t WHERE t.myCodeSystemVersion.myId = :cs_pid")
Slice<TermConceptProperty> findByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
}

View File

@ -160,13 +160,16 @@ public class FhirResourceDaoConceptMapDstu3 extends FhirResourceDaoDstu3<Concept
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
ResourceTable retVal = super.updateEntity(theRequestDetails, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theUpdateTime, theForceUpdate, theCreateNewHistoryEntry);
ConceptMap conceptMap = (ConceptMap) theResource;
// Convert from DSTU3 to R4
try {
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, VersionConvertor_30_40.convertConceptMap(conceptMap));
} catch (FHIRException fe) {
throw new InternalErrorException(fe);
if (retVal.getDeleted() == null) {
try {
ConceptMap conceptMap = (ConceptMap) theResource;
org.hl7.fhir.r4.model.ConceptMap converted = VersionConvertor_30_40.convertConceptMap(conceptMap);
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, converted);
} catch (FHIRException fe) {
throw new InternalErrorException(fe);
}
} else {
myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
}
return retVal;

View File

@ -26,12 +26,13 @@ import static org.apache.commons.lang3.StringUtils.trim;
import java.math.BigDecimal;
import java.util.*;
import javax.annotation.PostConstruct;
import javax.measure.unit.NonSI;
import javax.measure.unit.Unit;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.dstu3.context.IWorkerContext;
import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.CapabilityStatement.CapabilityStatementRestSecurityComponent;
@ -58,6 +59,13 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
@Autowired
private org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport myValidationSupport;
private HapiWorkerContext myWorkerContext;
@PostConstruct
public void start() {
myWorkerContext = new HapiWorkerContext(getContext(), myValidationSupport);
}
/**
* Constructor
*/
@ -65,8 +73,8 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
super();
}
public SearchParamExtractorDstu3(FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
super(theCtx, theSearchParamRegistry);
public SearchParamExtractorDstu3(DaoConfig theDaoConfig, FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
super(theDaoConfig, theCtx, theSearchParamRegistry);
myValidationSupport = theValidationSupport;
}
@ -78,7 +86,7 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -87,7 +95,7 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -695,8 +703,7 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
*/
@Override
protected List<Object> extractValues(String thePaths, IBaseResource theResource) {
IWorkerContext worker = new org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext(getContext(), myValidationSupport);
FHIRPathEngine fp = new FHIRPathEngine(worker);
FHIRPathEngine fp = new FHIRPathEngine(myWorkerContext);
List<Object> values = new ArrayList<>();
try {

View File

@ -38,7 +38,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport.CodeValidationResult;
import org.hl7.fhir.r4.hapi.ctx.ValidationSupportChain;
import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
@ -47,7 +47,6 @@ import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.IdType;
import org.springframework.beans.factory.annotation.Autowired;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

View File

@ -156,9 +156,12 @@ public class FhirResourceDaoConceptMapR4 extends FhirResourceDaoR4<ConceptMap> i
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
ResourceTable retVal = super.updateEntity(theRequestDetails, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theUpdateTime, theForceUpdate, theCreateNewHistoryEntry);
ConceptMap conceptMap = (ConceptMap) theResource;
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, conceptMap);
if (retVal.getDeleted() == null) {
ConceptMap conceptMap = (ConceptMap) theResource;
myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, conceptMap);
} else {
myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
}
return retVal;
}

View File

@ -64,8 +64,8 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
super();
}
public SearchParamExtractorR4(FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
super(theCtx, theSearchParamRegistry);
public SearchParamExtractorR4(DaoConfig theDaoConfig, FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
super(theDaoConfig, theCtx, theSearchParamRegistry);
myValidationSupport = theValidationSupport;
}
@ -77,7 +77,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -86,7 +86,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
nextEntity.setResource(theEntity);
retVal.add(nextEntity);
}
@ -104,7 +104,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
*/
@Override
public Set<ResourceIndexedSearchParamDate> extractSearchParamDates(ResourceTable theEntity, IBaseResource theResource) {
HashSet<ResourceIndexedSearchParamDate> retVal = new HashSet<ResourceIndexedSearchParamDate>();
HashSet<ResourceIndexedSearchParamDate> retVal = new HashSet<>();
Collection<RuntimeSearchParam> searchParams = getSearchParams(theResource);
for (RuntimeSearchParam nextSpDef : searchParams) {

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -36,13 +36,15 @@ import java.util.Date;
@MappedSuperclass
public abstract class BaseResourceIndexedSearchParam implements Serializable {
/** Don't change this without careful consideration. You will break existing hashes! */
private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
/** Don't make this public 'cause nobody better touch it! */
private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);
static final int MAX_SP_NAME = 100;
/**
* Don't change this without careful consideration. You will break existing hashes!
*/
private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
/**
* Don't make this public 'cause nobody better be able to modify it!
*/
private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);
private static final long serialVersionUID = 1L;
// TODO: make this nullable=false and a primitive (written may 2017)
@ -71,6 +73,13 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
@Temporal(TemporalType.TIMESTAMP)
private Date myUpdated;
/**
* Subclasses may override
*/
protected void clearHashes() {
// nothing
}
protected abstract Long getId();
public String getParamName() {
@ -82,13 +91,6 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
myParamName = theName;
}
/**
* Subclasses may override
*/
protected void clearHashes() {
// nothing
}
public ResourceTable getResource() {
return myResource;
}
@ -127,6 +129,10 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
public abstract IQueryParameterType toQueryParameterType();
public static long calculateHashIdentity(String theResourceType, String theParamName) {
return hash(theResourceType, theParamName);
}
/**
* Applies a fast and consistent hashing algorithm to a set of strings
*/
@ -148,5 +154,4 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
return hashCode.asLong();
}
}
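Given the HASH_FUNCTION and DELIMITER_BYTES fields above, the shared hash(...) helper plausibly looks like the sketch below: murmur3_128 with seed 0, each value separated by '|' so that, for example, ("ab", "c") and ("a", "bc") hash differently. The null handling here is an assumption, not lifted from the source:
import com.google.common.base.Charsets;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
public class HashSketch {
	static long hash(String... theValues) {
		Hasher hasher = Hashing.murmur3_128(0).newHasher();
		for (String next : theValues) {
			hasher.putString(next == null ? "" : next, Charsets.UTF_8); // assumed null handling
			hasher.putBytes("|".getBytes(Charsets.UTF_8)); // delimiter keeps adjacent values apart
		}
		return hasher.hash().asLong();
	}
}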

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -20,27 +20,17 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import org.hibernate.annotations.ColumnDefault;
import javax.persistence.*;
//@formatter:off
@Entity()
@Table(name = "HFJ_FORCED_ID", uniqueConstraints = {
@UniqueConstraint(name = "IDX_FORCEDID_RESID", columnNames = {"RESOURCE_PID"}),
@UniqueConstraint(name = "IDX_FORCEDID_TYPE_RESID", columnNames = {"RESOURCE_TYPE", "RESOURCE_PID"})
}, indexes= {
@UniqueConstraint(name = "IDX_FORCEDID_TYPE_RESID", columnNames = {"RESOURCE_TYPE", "RESOURCE_PID"}),
@UniqueConstraint(name = "IDX_FORCEDID_TYPE_FID", columnNames = {"RESOURCE_TYPE", "FORCED_ID"})
}, indexes = {
@Index(name = "IDX_FORCEDID_TYPE_FORCEDID", columnList = "RESOURCE_TYPE,FORCED_ID"),
})
//@formatter:on
@ -57,11 +47,11 @@ public class ForcedId {
@Column(name = "PID")
private Long myId;
@JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey=@ForeignKey(name="FK_FORCEDID_RESOURCE"))
@JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_FORCEDID_RESOURCE"))
@OneToOne()
private ResourceTable myResource;
@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable=false)
@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable = false)
private Long myResourcePid;
// This is updatable=true because it was added in 1.6 and needs to be set.. At some
@ -81,39 +71,39 @@ public class ForcedId {
return myForcedId;
}
public ResourceTable getResource() {
return myResource;
}
public Long getResourcePid() {
if (myResourcePid==null) {
return myResource.getId();
}
return myResourcePid;
}
public String getResourceType() {
return myResourceType;
}
public void setForcedId(String theForcedId) {
myForcedId = theForcedId;
}
public ResourceTable getResource() {
return myResource;
}
public void setResource(ResourceTable theResource) {
myResource = theResource;
}
public void setResourcePid(Long theResourcePid) {
myResourcePid = theResourcePid;
public Long getResourcePid() {
if (myResourcePid == null) {
return myResource.getId();
}
return myResourcePid;
}
public void setResourcePid(ResourceTable theResourcePid) {
myResource = theResourcePid;
}
public String getResourceType() {
return myResourceType;
}
public void setResourceType(String theResourceType) {
myResourceType = theResourceType;
}
public void setResourcePid(Long theResourcePid) {
myResourcePid = theResourcePid;
}
}

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -31,30 +31,32 @@ import javax.persistence.*;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_COORDS", indexes = {
@Index(name = "IDX_SP_COORDS", columnList = "RES_TYPE,SP_NAME,SP_LATITUDE,SP_LONGITUDE"),
@Index(name = "IDX_SP_COORDS_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_COORDS_RESID", columnList = "RES_ID")
@Table(name = "HFJ_SPIDX_COORDS", indexes = {
@Index(name = "IDX_SP_COORDS", columnList = "RES_TYPE,SP_NAME,SP_LATITUDE,SP_LONGITUDE"),
@Index(name = "IDX_SP_COORDS_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_COORDS_RESID", columnList = "RES_ID")
})
public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchParam {
public static final int MAX_LENGTH = 100;
private static final long serialVersionUID = 1L;
@Column(name = "SP_LATITUDE")
@Field
public double myLatitude;
@Column(name = "SP_LONGITUDE")
@Field
public double myLongitude;
@Id
@SequenceGenerator(name = "SEQ_SPIDX_COORDS", sequenceName = "SEQ_SPIDX_COORDS")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_COORDS")
@Column(name = "SP_ID")
private Long myId;
@Column(name = "SP_LATITUDE")
@Field
public double myLatitude;
@Column(name = "SP_LONGITUDE")
@Field
public double myLongitude;
/**
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
public ResourceIndexedSearchParamCoords() {
}
@ -65,6 +67,20 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
setLongitude(theLongitude);
}
@PrePersist
public void calculateHashes() {
if (myHashIdentity == null) {
String resourceType = getResourceType();
String paramName = getParamName();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
}
}
@Override
protected void clearHashes() {
myHashIdentity = null;
}
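The calculateHashes/clearHashes pair above is the lifecycle pattern every ResourceIndexedSearchParam* entity in this commit follows: hash columns are derived data, computed at most once, and JPA's @PrePersist callback guarantees they are populated just before INSERT even when no caller invokes the method explicitly. A minimal standalone illustration of the pattern; this is not one of the real entities, and String.hashCode stands in for murmur3:
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.PrePersist;
@Entity
public class HashedRow {
	@Id
	@GeneratedValue
	private Long id;
	private String resourceType;
	private String paramName;
	private Long hashIdentity; // derived column, persisted so an index can use it
	@PrePersist
	public void calculateHashes() {
		// compute once; an edit clears the field so it gets recomputed here
		if (hashIdentity == null) {
			hashIdentity = (long) (resourceType + "|" + paramName).hashCode();
		}
	}
}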
@Override
public boolean equals(Object theObj) {
if (this == theObj) {
@ -82,27 +98,39 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
b.append(getResource(), obj.getResource());
b.append(getLatitude(), obj.getLatitude());
b.append(getLongitude(), obj.getLongitude());
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}
public Long getHashIdentity() {
return myHashIdentity;
}
public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
@Override
protected Long getId() {
return myId;
}
@Override
public IQueryParameterType toQueryParameterType() {
return null;
}
public double getLatitude() {
return myLatitude;
}
public void setLatitude(double theLatitude) {
myLatitude = theLatitude;
}
public double getLongitude() {
return myLongitude;
}
public void setLongitude(double theLongitude) {
myLongitude = theLongitude;
}
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();
@ -113,12 +141,9 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
return b.toHashCode();
}
public void setLatitude(double theLatitude) {
myLatitude = theLatitude;
}
public void setLongitude(double theLongitude) {
myLongitude = theLongitude;
@Override
public IQueryParameterType toQueryParameterType() {
return null;
}
@Override

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -37,17 +37,14 @@ import java.util.Date;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_DATE", indexes = {
@Index(name = "IDX_SP_DATE", columnList = "RES_TYPE,SP_NAME,SP_VALUE_LOW,SP_VALUE_HIGH"),
// @Index(name = "IDX_SP_DATE", columnList = "RES_TYPE,SP_NAME,SP_VALUE_LOW,SP_VALUE_HIGH"),
@Index(name = "IDX_SP_DATE_HASH", columnList = "HASH_IDENTITY,SP_VALUE_LOW,SP_VALUE_HIGH"),
@Index(name = "IDX_SP_DATE_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_DATE_RESID", columnList = "RES_ID")
})
public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchParam {
private static final long serialVersionUID = 1L;
@Transient
private transient String myOriginalValue;
@Column(name = "SP_VALUE_HIGH", nullable = true)
@Temporal(TemporalType.TIMESTAMP)
@Field
@ -56,11 +53,18 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
@Temporal(TemporalType.TIMESTAMP)
@Field
public Date myValueLow;
@Transient
private transient String myOriginalValue;
@Id
@SequenceGenerator(name = "SEQ_SPIDX_DATE", sequenceName = "SEQ_SPIDX_DATE")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_DATE")
@Column(name = "SP_ID")
private Long myId;
/**
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
/**
* Constructor
@ -79,6 +83,20 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
myOriginalValue = theOriginalValue;
}
@PrePersist
public void calculateHashes() {
if (myHashIdentity == null) {
String resourceType = getResourceType();
String paramName = getParamName();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
}
}
@Override
protected void clearHashes() {
myHashIdentity = null;
}
@Override
public boolean equals(Object theObj) {
if (this == theObj) {
@ -97,9 +115,23 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
b.append(getResource(), obj.getResource());
b.append(getTimeFromDate(getValueHigh()), getTimeFromDate(obj.getValueHigh()));
b.append(getTimeFromDate(getValueLow()), getTimeFromDate(obj.getValueLow()));
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}
public Long getHashIdentity() {
return myHashIdentity;
}
public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
@Override
protected Long getId() {
return myId;
}
protected Long getTimeFromDate(Date date) {
if (date != null) {
return date.getTime();
@ -107,11 +139,6 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
return null;
}
@Override
protected Long getId() {
return myId;
}
public Date getValueHigh() {
return myValueHigh;
}

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -34,15 +34,14 @@ import org.hibernate.search.annotations.NumericField;
import javax.persistence.*;
import java.math.BigDecimal;
//@formatter:off
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_NUMBER", indexes = {
@Index(name = "IDX_SP_NUMBER", columnList = "RES_TYPE,SP_NAME,SP_VALUE"),
// @Index(name = "IDX_SP_NUMBER", columnList = "RES_TYPE,SP_NAME,SP_VALUE"),
@Index(name = "IDX_SP_NUMBER_HASH_VAL", columnList = "HASH_IDENTITY,SP_VALUE"),
@Index(name = "IDX_SP_NUMBER_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_NUMBER_RESID", columnList = "RES_ID")
})
//@formatter:on
public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchParam {
private static final long serialVersionUID = 1L;
@ -56,6 +55,11 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_NUMBER")
@Column(name = "SP_ID")
private Long myId;
/**
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
public ResourceIndexedSearchParamNumber() {
}
@ -65,6 +69,20 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
setValue(theValue);
}
@PrePersist
public void calculateHashes() {
if (myHashIdentity == null) {
String resourceType = getResourceType();
String paramName = getParamName();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
}
}
@Override
protected void clearHashes() {
myHashIdentity = null;
}
@Override
public boolean equals(Object theObj) {
if (this == theObj) {
@ -82,9 +100,18 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
b.append(getResource(), obj.getResource());
b.append(getValue(), obj.getValue());
b.append(isMissing(), obj.isMissing());
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}
public Long getHashIdentity() {
return myHashIdentity;
}
public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
@Override
protected Long getId() {
return myId;

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -33,13 +33,14 @@ import org.hibernate.search.annotations.NumericField;
import javax.persistence.*;
import java.math.BigDecimal;
import java.math.RoundingMode;
//@formatter:off
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_QUANTITY", indexes = {
@Index(name = "IDX_SP_QUANTITY", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_UNITS,SP_VALUE"),
// @Index(name = "IDX_SP_QUANTITY", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_UNITS,SP_VALUE"),
@Index(name = "IDX_SP_QUANTITY_HASH", columnList = "HASH_IDENTITY,SP_VALUE"),
@Index(name = "IDX_SP_QUANTITY_HASH_UN", columnList = "HASH_IDENTITY_AND_UNITS,SP_VALUE"),
@Index(name = "IDX_SP_QUANTITY_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_QUANTITY_RESID", columnList = "RES_ID")
})
@ -66,20 +67,26 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
@Column(name = "SP_ID")
private Long myId;
/**
* @since 3.4.0 - At some point this should be made not-null
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_UNITS_AND_VALPREFIX", nullable = true)
private Long myHashUnitsAndValPrefix;
@Column(name = "HASH_IDENTITY_AND_UNITS", nullable = true)
private Long myHashIdentityAndUnits;
/**
* @since 3.4.0 - At some point this should be made not-null
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_VALPREFIX", nullable = true)
private Long myHashValPrefix;
@Column(name = "HASH_IDENTITY_SYS_UNITS", nullable = true)
private Long myHashIdentitySystemAndUnits;
/**
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
public ResourceIndexedSearchParamQuantity() {
// nothing
}
public ResourceIndexedSearchParamQuantity(String theParamName, BigDecimal theValue, String theSystem, String theUnits) {
setParamName(theParamName);
setSystem(theSystem);
@ -89,16 +96,21 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
@PrePersist
public void calculateHashes() {
if (myHashUnitsAndValPrefix == null) {
setHashUnitsAndValPrefix(hash(getResourceType(), getParamName(), getSystem(), getUnits(), toTruncatedString(getValue())));
setHashValPrefix(hash(getResourceType(), getParamName(), toTruncatedString(getValue())));
if (myHashIdentity == null) {
String resourceType = getResourceType();
String paramName = getParamName();
String units = getUnits();
String system = getSystem();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
setHashIdentityAndUnits(calculateHashUnits(resourceType, paramName, units));
setHashIdentitySystemAndUnits(calculateHashSystemAndUnits(resourceType, paramName, system, units));
}
}
@Override
protected void clearHashes() {
myHashUnitsAndValPrefix = null;
myHashValPrefix = null;
myHashIdentity = null;
myHashIdentityAndUnits = null;
}
@Override
@ -119,27 +131,36 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
b.append(getSystem(), obj.getSystem());
b.append(getUnits(), obj.getUnits());
b.append(getValue(), obj.getValue());
b.append(getHashUnitsAndValPrefix(), obj.getHashUnitsAndValPrefix());
b.append(getHashValPrefix(), obj.getHashValPrefix());
b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getHashIdentitySystemAndUnits(), obj.getHashIdentitySystemAndUnits());
b.append(getHashIdentityAndUnits(), obj.getHashIdentityAndUnits());
return b.isEquals();
}
public Long getHashUnitsAndValPrefix() {
public Long getHashIdentity() {
calculateHashes();
return myHashUnitsAndValPrefix;
return myHashIdentity;
}
public void setHashUnitsAndValPrefix(Long theHashUnitsAndValPrefix) {
myHashUnitsAndValPrefix = theHashUnitsAndValPrefix;
public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
public Long getHashValPrefix() {
public Long getHashIdentityAndUnits() {
calculateHashes();
return myHashValPrefix;
return myHashIdentityAndUnits;
}
public void setHashValPrefix(Long theHashValPrefix) {
myHashValPrefix = theHashValPrefix;
public void setHashIdentityAndUnits(Long theHashIdentityAndUnits) {
myHashIdentityAndUnits = theHashIdentityAndUnits;
}
private Long getHashIdentitySystemAndUnits() {
return myHashIdentitySystemAndUnits;
}
public void setHashIdentitySystemAndUnits(Long theHashIdentitySystemAndUnits) {
myHashIdentitySystemAndUnits = theHashIdentitySystemAndUnits;
}
@Override
@ -176,14 +197,13 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
@Override
public int hashCode() {
calculateHashes();
HashCodeBuilder b = new HashCodeBuilder();
b.append(getResourceType());
b.append(getParamName());
b.append(getResource());
b.append(getSystem());
b.append(getUnits());
b.append(getValue());
b.append(getHashUnitsAndValPrefix());
b.append(getHashValPrefix());
return b.toHashCode();
}
@ -201,14 +221,16 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
b.append("units", getUnits());
b.append("value", getValue());
b.append("missing", isMissing());
b.append("hashIdentitySystemAndUnits", myHashIdentitySystemAndUnits);
return b.build();
}
private static String toTruncatedString(BigDecimal theValue) {
if (theValue == null) {
return null;
}
return theValue.setScale(0, RoundingMode.FLOOR).toPlainString();
public static long calculateHashSystemAndUnits(String theResourceType, String theParamName, String theSystem, String theUnits) {
return hash(theResourceType, theParamName, theSystem, theUnits);
}
public static long calculateHashUnits(String theResourceType, String theParamName, String theUnits) {
return hash(theResourceType, theParamName, theUnits);
}
}

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.StringParam;
import org.apache.commons.lang3.StringUtils;
@ -38,7 +39,14 @@ import static org.apache.commons.lang3.StringUtils.left;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_STRING", indexes = {
@Index(name = "IDX_SP_STRING", columnList = "RES_TYPE,SP_NAME,SP_VALUE_NORMALIZED"),
/*
* Note: We previously had indexes with the following names,
* do not reuse these names:
* IDX_SP_STRING
*/
@Index(name = "IDX_SP_STRING_HASH_NRM", columnList = "HASH_NORM_PREFIX,SP_VALUE_NORMALIZED"),
@Index(name = "IDX_SP_STRING_HASH_EXCT", columnList = "HASH_EXACT"),
@Index(name = "IDX_SP_STRING_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_STRING_RESID", columnList = "RES_ID")
})
@ -127,13 +135,16 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
*/
@Column(name = "HASH_EXACT", nullable = true)
private Long myHashExact;
@Transient
private transient DaoConfig myDaoConfig;
public ResourceIndexedSearchParamString() {
super();
}
public ResourceIndexedSearchParamString(String theName, String theValueNormalized, String theValueExact) {
public ResourceIndexedSearchParamString(DaoConfig theDaoConfig, String theName, String theValueNormalized, String theValueExact) {
setDaoConfig(theDaoConfig);
setParamName(theName);
setValueNormalized(theValueNormalized);
setValueExact(theValueExact);
@ -141,9 +152,13 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
@PrePersist
public void calculateHashes() {
if (myHashNormalizedPrefix == null) {
setHashNormalizedPrefix(hash(getResourceType(), getParamName(), left(getValueNormalized(), HASH_PREFIX_LENGTH)));
setHashExact(hash(getResourceType(), getParamName(), getValueExact()));
if (myHashNormalizedPrefix == null && myDaoConfig != null) {
String resourceType = getResourceType();
String paramName = getParamName();
String valueNormalized = getValueNormalized();
String valueExact = getValueExact();
setHashNormalizedPrefix(calculateHashNormalized(myDaoConfig, resourceType, paramName, valueNormalized));
setHashExact(calculateHashExact(resourceType, paramName, valueExact));
}
}
@ -169,8 +184,8 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
b.append(getParamName(), obj.getParamName());
b.append(getResource(), obj.getResource());
b.append(getValueExact(), obj.getValueExact());
b.append(getHashNormalizedPrefix(), obj.getHashNormalizedPrefix());
b.append(getHashExact(), obj.getHashExact());
b.append(getHashNormalizedPrefix(), obj.getHashNormalizedPrefix());
return b.isEquals();
}
@ -225,11 +240,14 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
b.append(getParamName());
b.append(getResource());
b.append(getValueExact());
b.append(getHashNormalizedPrefix());
b.append(getHashExact());
return b.toHashCode();
}
public BaseResourceIndexedSearchParam setDaoConfig(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
return this;
}
@Override
public IQueryParameterType toQueryParameterType() {
return new StringParam(getValueExact());
@ -244,4 +262,23 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
return b.build();
}
public static long calculateHashExact(String theResourceType, String theParamName, String theValueExact) {
return hash(theResourceType, theParamName, theValueExact);
}
public static long calculateHashNormalized(DaoConfig theDaoConfig, String theResourceType, String theParamName, String theValueNormalized) {
/*
* If we're not allowing :contains searches, we'll add the first
* bit of the normalized value to the hash. This makes the hash
* more selective, which is good for index performance.
*/
int hashPrefixLength = HASH_PREFIX_LENGTH;
if (theDaoConfig.isAllowContainsSearches()) {
hashPrefixLength = 0;
}
return hash(theResourceType, theParamName, left(theValueNormalized, hashPrefixLength));
}
}
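A minimal sketch (not part of this commit; identifiers hypothetical) of how the two configurations change the normalized-prefix hash. It assumes DaoConfig exposes a setAllowContainsSearches() setter to mirror the isAllowContainsSearches() read above:

	// Sketch only: calculateHashNormalized() under both configurations.
	DaoConfig config = new DaoConfig();
	config.setAllowContainsSearches(false);
	// Hashes "Patient" + "family" + the first HASH_PREFIX_LENGTH characters of
	// "smith", so different value prefixes land in different index buckets:
	long selective = ResourceIndexedSearchParamString.calculateHashNormalized(config, "Patient", "family", "smith");
	config.setAllowContainsSearches(true);
	// Hashes "Patient" + "family" + "" - the value is omitted entirely, because
	// a :contains search matches anywhere in the string and a prefix bucket
	// would wrongly exclude candidate rows:
	long broad = ResourceIndexedSearchParamString.calculateHashNormalized(config, "Patient", "family", "smith");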


@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -31,11 +31,23 @@ import org.hibernate.search.annotations.Field;
import javax.persistence.*;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.trim;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_TOKEN", indexes = {
@Index(name = "IDX_SP_TOKEN", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_VALUE"),
@Index(name = "IDX_SP_TOKEN_UNQUAL", columnList = "RES_TYPE,SP_NAME,SP_VALUE"),
/*
* Note: We previously had indexes with the following names,
* do not reuse these names:
* IDX_SP_TOKEN
* IDX_SP_TOKEN_UNQUAL
*/
@Index(name = "IDX_SP_TOKEN_HASH", columnList = "HASH_IDENTITY"),
@Index(name = "IDX_SP_TOKEN_HASH_S", columnList = "HASH_SYS"),
@Index(name = "IDX_SP_TOKEN_HASH_SV", columnList = "HASH_SYS_AND_VALUE"),
@Index(name = "IDX_SP_TOKEN_HASH_V", columnList = "HASH_VALUE"),
@Index(name = "IDX_SP_TOKEN_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_TOKEN_RESID", columnList = "RES_ID")
})
@ -56,6 +68,11 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_TOKEN")
@Column(name = "SP_ID")
private Long myId;
/**
* @since 3.4.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
/**
* @since 3.4.0 - At some point this should be made not-null
*/
@ -90,17 +107,20 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
setValue(theValue);
}
@PrePersist
public void calculateHashes() {
if (myHashSystem == null) {
setHashSystem(hash(getResourceType(), getParamName(), getSystem()));
setHashSystemAndValue(hash(getResourceType(), getParamName(), getSystem(), getValue()));
setHashValue(hash(getResourceType(), getParamName(), getValue()));
String resourceType = getResourceType();
String paramName = getParamName();
String system = getSystem();
String value = getValue();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
setHashSystem(calculateHashSystem(resourceType, paramName, system));
setHashSystemAndValue(calculateHashSystemAndValue(resourceType, paramName, system, value));
setHashValue(calculateHashValue(resourceType, paramName, value));
}
}
@Override
protected void clearHashes() {
myHashSystem = null;
@ -125,6 +145,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
b.append(getResource(), obj.getResource());
b.append(getSystem(), obj.getSystem());
b.append(getValue(), obj.getValue());
b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getHashSystem(), obj.getHashSystem());
b.append(getHashSystemAndValue(), obj.getHashSystemAndValue());
b.append(getHashValue(), obj.getHashValue());
@ -136,6 +157,15 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
return myHashSystem;
}
public Long getHashIdentity() {
calculateHashes();
return myHashIdentity;
}
public void setHashIdentity(Long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
public void setHashSystem(Long theHashSystem) {
myHashSystem = theHashSystem;
}
@ -184,18 +214,15 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
@Override
public int hashCode() {
calculateHashes();
HashCodeBuilder b = new HashCodeBuilder();
b.append(getParamName());
b.append(getResource());
b.append(getSystem());
b.append(getValue());
b.append(getHashSystem());
b.append(getHashSystemAndValue());
b.append(getHashValue());
return b.toHashCode();
}
@Override
public IQueryParameterType toQueryParameterType() {
return new TokenParam(getSystem(), getValue());
@ -210,4 +237,16 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
b.append("value", getValue());
return b.build();
}
public static long calculateHashSystem(String theResourceType, String theParamName, String theSystem) {
return hash(theResourceType, theParamName, trim(theSystem));
}
public static long calculateHashSystemAndValue(String theResourceType, String theParamName, String theSystem, String theValue) {
return hash(theResourceType, theParamName, defaultString(trim(theSystem)), trim(theValue));
}
public static long calculateHashValue(String theResourceType, String theParamName, String theValue) {
return hash(theResourceType, theParamName, trim(theValue));
}
}
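For context, a sketch (not in the commit; identifiers hypothetical) of what the new token hashes buy at query time:

	// Sketch only: a system|value token search can resolve to one indexed long
	// comparison against HASH_SYS_AND_VALUE (served by IDX_SP_TOKEN_HASH_SV),
	// instead of a composite scan over RES_TYPE/SP_NAME/SP_SYSTEM/SP_VALUE:
	long h = ResourceIndexedSearchParamToken.calculateHashSystemAndValue(
		"Patient", "identifier", "http://example.org/mrn", "12345");
	// ... WHERE t.HASH_SYS_AND_VALUE = :h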


@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -34,6 +34,8 @@ import javax.persistence.*;
@Entity
@Table(name = "HFJ_SPIDX_URI", indexes = {
@Index(name = "IDX_SP_URI", columnList = "RES_TYPE,SP_NAME,SP_URI"),
@Index(name = "IDX_SP_URI_HASH_IDENTITY", columnList = "HASH_IDENTITY,SP_URI"),
@Index(name = "IDX_SP_URI_HASH_URI", columnList = "HASH_URI"),
@Index(name = "IDX_SP_URI_RESTYPE_NAME", columnList = "RES_TYPE,SP_NAME"),
@Index(name = "IDX_SP_URI_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_URI_COORDS", columnList = "RES_ID")
@ -59,11 +61,17 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
*/
@Column(name = "HASH_URI", nullable = true)
private Long myHashUri;
/**
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)
private Long myHashIdentity;
/**
* Constructor
*/
public ResourceIndexedSearchParamUri() {
super();
}
/**
@ -77,7 +85,11 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
@PrePersist
public void calculateHashes() {
if (myHashUri == null) {
setHashUri(hash(getResourceType(), getParamName(), getUri()));
String resourceType = getResourceType();
String paramName = getParamName();
String uri = getUri();
setHashIdentity(calculateHashIdentity(resourceType, paramName));
setHashUri(calculateHashUri(resourceType, paramName, uri));
}
}
@ -103,9 +115,18 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
b.append(getResource(), obj.getResource());
b.append(getUri(), obj.getUri());
b.append(getHashUri(), obj.getHashUri());
b.append(getHashIdentity(), obj.getHashIdentity());
return b.isEquals();
}
private Long getHashIdentity() {
return myHashIdentity;
}
private void setHashIdentity(long theHashIdentity) {
myHashIdentity = theHashIdentity;
}
public Long getHashUri() {
calculateHashes();
return myHashUri;
@ -153,4 +174,8 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
return b.toString();
}
public static long calculateHashUri(String theResourceType, String theParamName, String theUri) {
return hash(theResourceType, theParamName, theUri);
}
}
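The URI entity follows the same scheme at two granularities; a hypothetical sketch, assuming calculateHashIdentity() is the static base-class helper that the calculateHashes() method above calls:

	// Sketch only: HASH_IDENTITY covers just (resource type, param name) and
	// backs IDX_SP_URI_HASH_IDENTITY for "all URIs of this parameter" queries,
	// while HASH_URI pins one exact URI via IDX_SP_URI_HASH_URI:
	long identity = BaseResourceIndexedSearchParam.calculateHashIdentity("StructureDefinition", "url");
	long exact = ResourceIndexedSearchParamUri.calculateHashUri("StructureDefinition", "url", "http://example.org/sd/1");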


@ -66,14 +66,12 @@ public class ResourceLink implements Serializable {
@ManyToOne(optional = false, fetch=FetchType.LAZY)
@JoinColumn(name = "SRC_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey=@ForeignKey(name="FK_RESLINK_SOURCE"))
// @ContainedIn()
private ResourceTable mySourceResource;
@Column(name = "SRC_RESOURCE_ID", insertable = false, updatable = false, nullable = false)
private Long mySourceResourcePid;
@Column(name = "SOURCE_RESOURCE_TYPE", nullable=false, length=ResourceTable.RESTYPE_LEN)
@ColumnDefault("''") // TODO: remove this (it's only here for simplifying upgrades of 1.3 -> 1.4)
@Field()
private String mySourceResourceType;
@ -86,7 +84,6 @@ public class ResourceLink implements Serializable {
private Long myTargetResourcePid;
@Column(name = "TARGET_RESOURCE_TYPE", nullable=false, length=ResourceTable.RESTYPE_LEN)
@ColumnDefault("''") // TODO: remove this (it's only here for simplifying upgrades of 1.3 -> 1.4)
@Field()
private String myTargetResourceType;


@ -1,59 +0,0 @@
package ca.uhn.fhir.jpa.entity;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import javax.persistence.*;
@Entity
@Table(name = "HFJ_SEARCH_PARM", uniqueConstraints= {
@UniqueConstraint(name="IDX_SEARCHPARM_RESTYPE_SPNAME", columnNames= {"RES_TYPE", "PARAM_NAME"})
})
public class SearchParam {
@Id
@SequenceGenerator(name = "SEQ_SEARCHPARM_ID", sequenceName = "SEQ_SEARCHPARM_ID")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SEARCHPARM_ID")
@Column(name = "PID")
private Long myId;
@Column(name="PARAM_NAME", length=BaseResourceIndexedSearchParam.MAX_SP_NAME, nullable=false, updatable=false)
private String myParamName;
@Column(name="RES_TYPE", length=ResourceTable.RESTYPE_LEN, nullable=false, updatable=false)
private String myResourceName;
public String getParamName() {
return myParamName;
}
public void setParamName(String theParamName) {
myParamName = theParamName;
}
public void setResourceName(String theResourceName) {
myResourceName = theResourceName;
}
public Long getId() {
return myId;
}
}


@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -20,18 +20,16 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import java.io.Serializable;
import javax.persistence.*;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import javax.persistence.*;
import java.io.Serializable;
@Entity
@Table(name = "HFJ_RES_PARAM_PRESENT", indexes = {
@Index(name = "IDX_RESPARMPRESENT_RESID", columnList = "RES_ID")
}, uniqueConstraints = {
@UniqueConstraint(name = "IDX_RESPARMPRESENT_SPID_RESID", columnNames = { "SP_ID", "RES_ID" })
@Index(name = "IDX_RESPARMPRESENT_RESID", columnList = "RES_ID"),
@Index(name = "IDX_RESPARMPRESENT_HASHPRES", columnList = "HASH_PRESENCE")
})
public class SearchParamPresent implements Serializable {
@ -42,17 +40,15 @@ public class SearchParamPresent implements Serializable {
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESPARMPRESENT_ID")
@Column(name = "PID")
private Long myId;
@Column(name = "SP_PRESENT", nullable = false)
private boolean myPresent;
@ManyToOne()
@JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESPARMPRES_RESID"))
private ResourceTable myResource;
@ManyToOne()
@JoinColumn(name = "SP_ID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESPARMPRES_SPID"))
private SearchParam mySearchParam;
@Transient
private transient String myParamName;
@Column(name = "HASH_PRESENCE")
private Long myHashPresence;
/**
* Constructor
@ -60,13 +56,40 @@ public class SearchParamPresent implements Serializable {
public SearchParamPresent() {
super();
}
@SuppressWarnings("unused")
@PrePersist
public void calculateHashes() {
if (myHashPresence == null) {
String resourceType = getResource().getResourceType();
String paramName = getParamName();
boolean present = myPresent;
setHashPresence(calculateHashPresence(resourceType, paramName, present));
}
}
public Long getHashPresence() {
return myHashPresence;
}
public void setHashPresence(Long theHashPresence) {
myHashPresence = theHashPresence;
}
public String getParamName() {
return myParamName;
}
public void setParamName(String theParamName) {
myParamName = theParamName;
}
public ResourceTable getResource() {
return myResource;
}
public SearchParam getSearchParam() {
return mySearchParam;
public void setResource(ResourceTable theResourceTable) {
myResource = theResourceTable;
}
public boolean isPresent() {
@ -77,22 +100,18 @@ public class SearchParamPresent implements Serializable {
myPresent = thePresent;
}
public void setResource(ResourceTable theResourceTable) {
myResource = theResourceTable;
}
public void setSearchParam(SearchParam theSearchParam) {
mySearchParam = theSearchParam;
}
@Override
public String toString() {
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
b.append("res_pid", myResource.getIdDt().toUnqualifiedVersionless().getValue());
b.append("param", mySearchParam.getParamName());
b.append("resPid", myResource.getIdDt().toUnqualifiedVersionless().getValue());
b.append("paramName", myParamName);
b.append("present", myPresent);
return b.build();
}
public static long calculateHashPresence(String theResourceType, String theParamName, boolean thePresent) {
return BaseResourceIndexedSearchParam.hash(theResourceType, theParamName, Boolean.toString(thePresent));
}
}
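A short sketch (not in the commit; parameter names hypothetical) of why HASH_PRESENCE can replace the old SearchParam join entity:

	// Sketch only: resource type, parameter name and the present flag collapse
	// into one comparable long, so present and absent rows hash differently and
	// reconciliation (see SearchParamPresenceSvcImpl) becomes set arithmetic:
	long present = SearchParamPresent.calculateHashPresence("Patient", "organization", true);
	long absent = SearchParamPresent.calculateHashPresence("Patient", "organization", false);
	assert present != absent;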


@ -77,12 +77,12 @@ public class TermConcept implements Serializable {
})
private String myDisplay;
@OneToMany(mappedBy = "myConcept", orphanRemoval = true)
@OneToMany(mappedBy = "myConcept", orphanRemoval = false)
@Field(name = "PROPmyProperties", analyzer = @Analyzer(definition = "termConceptPropertyAnalyzer"))
@FieldBridge(impl = TermConceptPropertyFieldBridge.class)
private Collection<TermConceptProperty> myProperties;
@OneToMany(mappedBy = "myConcept", orphanRemoval = true)
@OneToMany(mappedBy = "myConcept", orphanRemoval = false)
private Collection<TermConceptDesignation> myDesignations;
@Id()
@ -130,6 +130,7 @@ public class TermConcept implements Serializable {
public TermConceptDesignation addDesignation() {
TermConceptDesignation designation = new TermConceptDesignation();
designation.setConcept(this);
designation.setCodeSystemVersion(myCodeSystem);
getDesignations().add(designation);
return designation;
}
@ -139,6 +140,7 @@ public class TermConcept implements Serializable {
TermConceptProperty property = new TermConceptProperty();
property.setConcept(this);
property.setCodeSystemVersion(myCodeSystem);
property.setType(thePropertyType);
property.setKey(thePropertyName);
property.setValue(thePropertyValue);


@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -48,6 +48,14 @@ public class TermConceptDesignation implements Serializable {
private String myUseDisplay;
@Column(name = "VAL", length = 500, nullable = false)
private String myValue;
/**
* TODO: Make this non-null
*
* @since 3.5.0
*/
@ManyToOne
@JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTDESIG_CSV"))
private TermCodeSystemVersion myCodeSystemVersion;
public String getLanguage() {
return myLanguage;
@ -94,6 +102,11 @@ public class TermConceptDesignation implements Serializable {
return this;
}
public TermConceptDesignation setCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
myCodeSystemVersion = theCodeSystemVersion;
return this;
}
public TermConceptDesignation setConcept(TermConcept theConcept) {
myConcept = theConcept;
return this;


@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -38,6 +38,14 @@ public class TermConceptProperty implements Serializable {
@ManyToOne
@JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT"))
private TermConcept myConcept;
/**
* TODO: Make this non-null
*
* @since 3.5.0
*/
@ManyToOne
@JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CSV"))
private TermCodeSystemVersion myCodeSystemVersion;
@Id()
@SequenceGenerator(name = "SEQ_CONCEPT_PROP_PID", sequenceName = "SEQ_CONCEPT_PROP_PID")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_PROP_PID")
@ -124,6 +132,11 @@ public class TermConceptProperty implements Serializable {
myValue = theValue;
}
public TermConceptProperty setCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
myCodeSystemVersion = theCodeSystemVersion;
return this;
}
public void setConcept(TermConcept theConcept) {
myConcept = theConcept;
}


@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.sp;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -20,14 +20,12 @@ package ca.uhn.fhir.jpa.sp;
* #L%
*/
import java.util.Map;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import java.util.Map;
public interface ISearchParamPresenceSvc {
void updatePresence(ResourceTable theResource, Map<String, Boolean> theParamNameToPresence);
void flushCachesForUnitTest();
}


@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.sp;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -20,29 +20,17 @@ package ca.uhn.fhir.jpa.sp;
* #L%
*/
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.beans.factory.annotation.Autowired;
import ca.uhn.fhir.jpa.dao.data.ISearchParamDao;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.SearchParam;
import ca.uhn.fhir.jpa.entity.SearchParamPresent;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.*;
import java.util.Map.Entry;
public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchParamPresenceSvcImpl.class);
private Map<Pair<String, String>, SearchParam> myResourceTypeToSearchParamToEntity = new ConcurrentHashMap<Pair<String, String>, SearchParam>();
@Autowired
private ISearchParamDao mySearchParamDao;
@Autowired
private ISearchParamPresentDao mySearchParamPresentDao;
@ -55,62 +43,48 @@ public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc {
return;
}
Map<String, Boolean> presenceMap = new HashMap<String, Boolean>(theParamNameToPresence);
List<SearchParamPresent> entitiesToSave = new ArrayList<SearchParamPresent>();
List<SearchParamPresent> entitiesToDelete = new ArrayList<SearchParamPresent>();
Map<String, Boolean> presenceMap = new HashMap<>(theParamNameToPresence);
// Find existing entries
Collection<SearchParamPresent> existing;
existing = mySearchParamPresentDao.findAllForResource(theResource);
Map<Long, SearchParamPresent> existingHashToPresence = new HashMap<>();
for (SearchParamPresent nextExistingEntity : existing) {
String nextSearchParamName = nextExistingEntity.getSearchParam().getParamName();
Boolean existingValue = presenceMap.remove(nextSearchParamName);
if (existingValue == null) {
entitiesToDelete.add(nextExistingEntity);
} else if (existingValue.booleanValue() == nextExistingEntity.isPresent()) {
ourLog.trace("No change for search param {}", nextSearchParamName);
} else {
nextExistingEntity.setPresent(existingValue);
entitiesToSave.add(nextExistingEntity);
}
existingHashToPresence.put(nextExistingEntity.getHashPresence(), nextExistingEntity);
}
// Find newly wanted set of entries
Map<Long, SearchParamPresent> newHashToPresence = new HashMap<>();
for (Entry<String, Boolean> next : presenceMap.entrySet()) {
String resourceType = theResource.getResourceType();
String paramName = next.getKey();
Pair<String, String> key = Pair.of(resourceType, paramName);
SearchParam searchParam = myResourceTypeToSearchParamToEntity.get(key);
if (searchParam == null) {
searchParam = mySearchParamDao.findForResource(resourceType, paramName);
if (searchParam != null) {
myResourceTypeToSearchParamToEntity.put(key, searchParam);
} else {
searchParam = new SearchParam();
searchParam.setResourceName(resourceType);
searchParam.setParamName(paramName);
searchParam = mySearchParamDao.save(searchParam);
ourLog.info("Added search param {} with pid {}", paramName, searchParam.getId());
// Don't add the newly saved entity to the map in case the save fails
}
}
SearchParamPresent present = new SearchParamPresent();
present.setResource(theResource);
present.setSearchParam(searchParam);
present.setParamName(paramName);
present.setPresent(next.getValue());
entitiesToSave.add(present);
present.calculateHashes();
newHashToPresence.put(present.getHashPresence(), present);
}
mySearchParamPresentDao.deleteInBatch(entitiesToDelete);
mySearchParamPresentDao.saveAll(entitiesToSave);
// Delete any that should be deleted
List<SearchParamPresent> toDelete = new ArrayList<>();
for (Entry<Long, SearchParamPresent> nextEntry : existingHashToPresence.entrySet()) {
if (newHashToPresence.containsKey(nextEntry.getKey()) == false) {
toDelete.add(nextEntry.getValue());
}
}
mySearchParamPresentDao.deleteInBatch(toDelete);
}
// Add any that should be added
List<SearchParamPresent> toAdd = new ArrayList<>();
for (Entry<Long, SearchParamPresent> nextEntry : newHashToPresence.entrySet()) {
if (existingHashToPresence.containsKey(nextEntry.getKey()) == false) {
toAdd.add(nextEntry.getValue());
}
}
mySearchParamPresentDao.saveAll(toAdd);
@Override
public void flushCachesForUnitTest() {
myResourceTypeToSearchParamToEntity.clear();
}
}
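Restated as a sketch (hypothetical helper, not the committed code), the loops above compute two set differences keyed on HASH_PRESENCE - delete (existing minus wanted), save (wanted minus existing):

	// Sketch only:
	static List<SearchParamPresent> difference(Map<Long, SearchParamPresent> theFrom, Map<Long, SearchParamPresent> theSubtract) {
		List<SearchParamPresent> retVal = new ArrayList<>();
		for (Map.Entry<Long, SearchParamPresent> next : theFrom.entrySet()) {
			if (!theSubtract.containsKey(next.getKey())) {
				retVal.add(next.getValue());
			}
		}
		return retVal;
	}
	// usage: deleteInBatch(difference(existingHashToPresence, newHashToPresence));
	//        saveAll(difference(newHashToPresence, existingHashToPresence));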


@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.term;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -65,6 +65,7 @@ import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
@ -161,14 +162,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
}
private void removeCodeFromExpansion(String theCodeSystem, String theCode, ValueSet.ValueSetExpansionComponent theExpansionComponent) {
theExpansionComponent
.getContains()
.removeIf(t ->
theCodeSystem.equals(t.getSystem()) &&
theCode.equals(t.getCode()));
}
private void addConceptsToList(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, String theSystem, List<CodeSystem.ConceptDefinitionComponent> theConcept, boolean theAdd) {
for (CodeSystem.ConceptDefinitionComponent next : theConcept) {
if (theAdd && theAddedCodes.add(next.getCode())) {
@ -272,21 +265,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
int i = 0;
for (TermCodeSystemVersion next : myCodeSystemVersionDao.findByCodeSystemResource(theCodeSystem.getPid())) {
myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
for (TermConcept nextConcept : myConceptDao.findByCodeSystemVersion(next.getPid())) {
myConceptPropertyDao.deleteAll(nextConcept.getProperties());
myConceptDesignationDao.deleteAll(nextConcept.getDesignations());
myConceptDao.delete(nextConcept);
}
if (next.getCodeSystem().getCurrentVersion() == next) {
next.getCodeSystem().setCurrentVersion(null);
myCodeSystemDao.save(next.getCodeSystem());
}
myCodeSystemVersionDao.delete(next);
if (i++ % 1000 == 0) {
myEntityManager.flush();
}
deleteCodeSystemVersion(next.getPid());
}
myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem);
myCodeSystemDao.delete(theCodeSystem);
@ -294,6 +273,130 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
myEntityManager.flush();
}
public void deleteCodeSystemVersion(Long theCodeSystemVersionPid) {
ourLog.info(" * Deleting code system version {}", theCodeSystemVersionPid);
PageRequest page = PageRequest.of(0, 1000);
int count;
// Parent/Child links
ourLog.info(" * Deleting parent/child links");
count = 0;
while (true) {
Slice<TermConceptParentChildLink> link = myConceptParentChildLinkDao.findByCodeSystemVersion(page, theCodeSystemVersionPid);
if (link.hasContent() == false) {
break;
}
myConceptParentChildLinkDao.deleteInBatch(link);
count += link.getNumberOfElements();
ourLog.info(" * {} parent/child links deleted", count);
}
myConceptParentChildLinkDao.flush();
// Properties
ourLog.info(" * Deleting properties");
count = 0;
while (true) {
Slice<TermConceptProperty> link = myConceptPropertyDao.findByCodeSystemVersion(page, theCodeSystemVersionPid);
if (link.hasContent() == false) {
break;
}
myConceptPropertyDao.deleteInBatch(link);
count += link.getNumberOfElements();
ourLog.info(" * {} concept properties deleted", count);
}
myConceptPropertyDao.flush();
// Designations
ourLog.info(" * Deleting designations");
count = 0;
while (true) {
Slice<TermConceptDesignation> link = myConceptDesignationDao.findByCodeSystemVersion(page, theCodeSystemVersionPid);
if (link.hasContent() == false) {
break;
}
myConceptDesignationDao.deleteInBatch(link);
count += link.getNumberOfElements();
ourLog.info(" * {} concept designations deleted", count);
}
myConceptDesignationDao.flush();
// Concepts
ourLog.info(" * Deleting concepts");
count = 0;
while (true) {
Slice<TermConcept> link = myConceptDao.findByCodeSystemVersion(page, theCodeSystemVersionPid);
if (link.hasContent() == false) {
break;
}
myConceptDao.deleteInBatch(link);
myConceptDao.flush();
count += link.getNumberOfElements();
ourLog.info(" * {} concepts deleted", count);
}
Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
if (codeSystemOpt.isPresent()) {
TermCodeSystem codeSystem = codeSystemOpt.get();
ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid());
codeSystem.setCurrentVersion(null);
myCodeSystemDao.save(codeSystem);
}
ourLog.info(" * Deleting code system version");
myCodeSystemVersionDao.deleteById(theCodeSystemVersionPid);
}
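The four loops above share one shape; a generic sketch (hypothetical helper, not in the commit), assuming java.util.function.Function and Spring Data repositories whose finders take a Pageable and return a Slice, as the DAOs above do:

	// Sketch only: page through matching rows and delete each page in one
	// batched statement, so huge code systems never sit in memory at once.
	private <T> int deleteInPages(Function<Pageable, Slice<T>> theFinder, JpaRepository<T, Long> theRepo) {
		PageRequest page = PageRequest.of(0, 1000);
		int count = 0;
		while (true) {
			// Always re-fetch page 0: rows deleted so far no longer match, so
			// the next thousand slide into view on their own.
			Slice<T> slice = theFinder.apply(page);
			if (!slice.hasContent()) {
				break;
			}
			theRepo.deleteInBatch(slice); // one batched DELETE per page
			count += slice.getNumberOfElements();
		}
		return count;
	}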
public void deleteConceptMap(ResourceTable theResourceTable) {
// Get existing entity so it can be deleted.
Optional<TermConceptMap> optionalExistingTermConceptMapById = myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId());
if (optionalExistingTermConceptMapById.isPresent()) {
TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get();
ourLog.info("Deleting existing TermConceptMap {} and its children...", existingTermConceptMap.getId());
for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) {
for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) {
for (TermConceptMapGroupElementTarget target : element.getConceptMapGroupElementTargets()) {
myConceptMapGroupElementTargetDao.deleteTermConceptMapGroupElementTargetById(target.getId());
}
myConceptMapGroupElementDao.deleteTermConceptMapGroupElementById(element.getId());
}
myConceptMapGroupDao.deleteTermConceptMapGroupById(group.getId());
}
myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId());
ourLog.info("Done deleting existing TermConceptMap {} and its children.", existingTermConceptMap.getId());
ourLog.info("Flushing...");
myConceptMapGroupElementTargetDao.flush();
myConceptMapGroupElementDao.flush();
myConceptMapGroupDao.flush();
myConceptMapDao.flush();
ourLog.info("Done flushing.");
}
}
@Override
@Transactional
public void deleteConceptMapAndChildren(ResourceTable theResourceTable) {
deleteConceptMap(theResourceTable);
}
private int ensureParentsSaved(Collection<TermConceptParentChildLink> theParents) {
ourLog.trace("Checking {} parents", theParents.size());
int retVal = 0;
@ -336,6 +439,19 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
return valueSet;
}
protected List<VersionIndependentConcept> expandValueSetAndReturnVersionIndependentConcepts(org.hl7.fhir.r4.model.ValueSet theValueSetToExpandR4) {
org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionComponent expandedR4 = expandValueSet(theValueSetToExpandR4).getExpansion();
ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
for (org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionContainsComponent nextContains : expandedR4.getContains()) {
retVal.add(
new VersionIndependentConcept()
.setSystem(nextContains.getSystem())
.setCode(nextContains.getCode()));
}
return retVal;
}
public void expandValueSetHandleIncludeOrExclude(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, ValueSet.ConceptSetComponent include, boolean theAdd) {
String system = include.getSystem();
if (isNotBlank(system)) {
@ -492,19 +608,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
}
protected List<VersionIndependentConcept> expandValueSetAndReturnVersionIndependentConcepts(org.hl7.fhir.r4.model.ValueSet theValueSetToExpandR4) {
org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionComponent expandedR4 = expandValueSet(theValueSetToExpandR4).getExpansion();
ArrayList<VersionIndependentConcept> retVal = new ArrayList<>();
for (org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionContainsComponent nextContains : expandedR4.getContains()) {
retVal.add(
new VersionIndependentConcept()
.setSystem(nextContains.getSystem())
.setCode(nextContains.getCode()));
}
return retVal;
}
private void fetchChildren(TermConcept theConcept, Set<TermConcept> theSetToPopulate) {
for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) {
TermConcept nextChild = nextChildLink.getChild();
@ -807,6 +910,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
private void removeCodeFromExpansion(String theCodeSystem, String theCode, ValueSet.ValueSetExpansionComponent theExpansionComponent) {
theExpansionComponent
.getContains()
.removeIf(t ->
theCodeSystem.equals(t.getSystem()) &&
theCode.equals(t.getCode()));
}
private int saveConcept(TermConcept theConcept) {
int retVal = 0;
@ -854,7 +965,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
tt.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
if(!myDeferredConcepts.isEmpty() || !myConceptLinksToSaveLater.isEmpty()) {
if (!myDeferredConcepts.isEmpty() || !myConceptLinksToSaveLater.isEmpty()) {
tt.execute(t -> {
processDeferredConcepts();
return null;
@ -910,20 +1021,12 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
ourLog.info("Deleting old code system versions");
for (TermCodeSystemVersion next : existing) {
ourLog.info(" * Deleting code system version {}", next.getPid());
myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
for (TermConcept nextConcept : myConceptDao.findByCodeSystemVersion(next.getPid())) {
myConceptPropertyDao.deleteAll(nextConcept.getProperties());
myConceptDao.delete(nextConcept);
}
Long codeSystemVersionPid = next.getPid();
deleteCodeSystemVersion(codeSystemVersionPid);
}
ourLog.info("Flushing...");
myConceptParentChildLinkDao.flush();
myConceptPropertyDao.flush();
myConceptDao.flush();
ourLog.info("Done flushing");
/*
@ -1026,42 +1129,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
termConceptMap.setResource(theResourceTable);
termConceptMap.setUrl(theConceptMap.getUrl());
// Get existing entity so it can be deleted.
Optional<TermConceptMap> optionalExistingTermConceptMapById = myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId());
/*
* For now we always delete old versions. At some point, it would be nice to allow configuration to keep old versions.
*/
if (optionalExistingTermConceptMapById.isPresent()) {
TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get();
ourLog.info("Deleting existing TermConceptMap {} and its children...", existingTermConceptMap.getId());
for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) {
for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) {
for (TermConceptMapGroupElementTarget target : element.getConceptMapGroupElementTargets()) {
myConceptMapGroupElementTargetDao.deleteTermConceptMapGroupElementTargetById(target.getId());
}
myConceptMapGroupElementDao.deleteTermConceptMapGroupElementById(element.getId());
}
myConceptMapGroupDao.deleteTermConceptMapGroupById(group.getId());
}
myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId());
ourLog.info("Done deleting existing TermConceptMap {} and its children.", existingTermConceptMap.getId());
ourLog.info("Flushing...");
myConceptMapGroupElementTargetDao.flush();
myConceptMapGroupElementDao.flush();
myConceptMapGroupDao.flush();
myConceptMapDao.flush();
ourLog.info("Done flushing.");
}
deleteConceptMap(theResourceTable);
/*
* Do the upload.
@ -1082,6 +1153,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
throw new InternalErrorException(fe);
}
myConceptMapDao.save(termConceptMap);
int codesSaved = 0;
if (theConceptMap.hasGroup()) {
TermConceptMapGroup termConceptMapGroup;
@ -1117,7 +1189,12 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
termConceptMapGroupElementTarget.setCode(target.getCode());
termConceptMapGroupElementTarget.setDisplay(target.getDisplay());
termConceptMapGroupElementTarget.setEquivalence(target.getEquivalence());
myConceptMapGroupElementTargetDao.saveAndFlush(termConceptMapGroupElementTarget);
myConceptMapGroupElementTargetDao.save(termConceptMapGroupElementTarget);
if (codesSaved++ % 250 == 0) {
ourLog.info("Have saved {} codes in conceptmap", codesSaved);
myConceptMapGroupElementTargetDao.flush();
}
}
}
}
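The save-plus-periodic-flush above replaces a per-row saveAndFlush(); sketched in isolation (the 'targets' collection is hypothetical):

	// Sketch only: save() just queues the entity in the persistence context;
	// flushing every 250th row sends the INSERTs in chunks rather than one
	// round-trip per ConceptMap target:
	int saved = 0;
	for (TermConceptMapGroupElementTarget next : targets) {
		myConceptMapGroupElementTargetDao.save(next);
		if (saved++ % 250 == 0) {
			myConceptMapGroupElementTargetDao.flush();
		}
	}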


@ -68,6 +68,8 @@ public interface IHapiTerminologySvc {
*/
IIdType storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<org.hl7.fhir.r4.model.ValueSet> theValueSets, List<org.hl7.fhir.r4.model.ConceptMap> theConceptMaps);
void deleteConceptMapAndChildren(ResourceTable theResourceTable);
void storeTermConceptMapAndChildren(ResourceTable theResourceTable, ConceptMap theConceptMap);
boolean supportsSystem(String theCodeSystem);


@ -11,6 +11,7 @@ import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship;
import ca.uhn.fhir.jpa.util.Counter;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
@ -77,8 +78,12 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
public static final String LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE = "LoincUniversalLabOrdersValueSet.csv";
public static final String LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV = "LoincIeeeMedicalDeviceCodeMappingTable.csv";
public static final String LOINC_IMAGING_DOCUMENT_CODES_FILE = "ImagingDocumentCodes.csv";
private static final int LOG_INCREMENT = 100000;
public static final String LOINC_GROUP_FILE = "Group.csv";
public static final String LOINC_GROUP_TERMS_FILE = "GroupLoincTerms.csv";
public static final String LOINC_PARENT_GROUP_FILE = "ParentGroup.csv";
private static final int LOG_INCREMENT = 1000;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImpl.class);
@Autowired
private IHapiTerminologySvc myTermSvc;
@Autowired(required = false)
@ -119,12 +124,20 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
}
private void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode) {
private void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) {
boolean foundMatch = false;
for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) {
String nextFilename = nextZipBytes.getFilename();
if (nextFilename.contains(theFileNamePart)) {
boolean matches;
if (theIsPartialFilename) {
matches = nextFilename.contains(theFileNamePart);
} else {
matches = nextFilename.endsWith("/" + theFileNamePart) || nextFilename.equals(theFileNamePart);
}
if (matches) {
ourLog.info("Processing file {}", nextFilename);
foundMatch = true;
Reader reader;
CSVParser parsed;
@ -164,6 +177,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
}
if (!foundMatch) {
throw new InvalidRequestException("Did not find file matching " + theFileNamePart);
}
}
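Why the exact-match mode matters once Group.csv joins the bundle (a hypothetical illustration, not part of the commit; the archive path is invented):

	// Sketch only: with partial matching, loading "Group.csv" would also
	// swallow "ParentGroup.csv", because contains() finds one name inside the other:
	String group = "LOINC_2.63/Group.csv";
	String parentGroup = "LOINC_2.63/ParentGroup.csv";
	assert parentGroup.contains("Group.csv");        // partial mode: false positive
	assert !parentGroup.endsWith("/" + "Group.csv"); // exact mode: correctly rejected
	assert group.endsWith("/" + "Group.csv");        // exact mode: accepted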
@Override
@ -171,10 +188,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles);
List<String> mandatoryFilenameFragments = Arrays.asList(
LOINC_FILE,
LOINC_HIERARCHY_FILE);
descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments);
List<String> optionalFilenameFragments = Arrays.asList(
LOINC_HIERARCHY_FILE,
LOINC_UPLOAD_PROPERTIES_FILE,
LOINC_ANSWERLIST_FILE,
LOINC_ANSWERLIST_LINK_FILE,
@ -189,6 +203,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV,
LOINC_IMAGING_DOCUMENT_CODES_FILE
);
descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments);
List<String> optionalFilenameFragments = Arrays.asList(
);
descriptors.verifyOptionalFilesExist(optionalFilenameFragments);
ourLog.info("Beginning LOINC processing");
@ -251,60 +269,75 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
// Part file
handler = new LoincPartHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theDescriptors, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
Map<PartTypeAndPartName, String> partTypeAndPartNameToPartNumber = ((LoincPartHandler) handler).getPartTypeAndPartNameToPartNumber();
// Loinc Codes
handler = new LoincHandler(codeSystemVersion, code2concept, propertyNamesToTypes, partTypeAndPartNameToPartNumber);
iterateOverZipFile(theDescriptors, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Loinc Hierarchy
handler = new LoincHierarchyHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theDescriptors, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Answer lists (ValueSets of potential answers/values for loinc "questions")
handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Answer list links (connects loinc observation codes to answerlist codes)
handler = new LoincAnswerListLinkHandler(code2concept, valueSets);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// RSNA Playbook file
// Note that this should come before the "Part Related Code Mapping"
// file because there are some duplicate mappings between these
// two files, and the RSNA Playbook file has more metadata
handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Part link file
handler = new LoincPartLinkHandler(codeSystemVersion, code2concept);
iterateOverZipFile(theDescriptors, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Part related code mapping
handler = new LoincPartRelatedCodeMappingHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Document Ontology File
handler = new LoincDocumentOntologyHandler(code2concept, propertyNamesToTypes, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC);
// RSNA Playbook file
handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Top 2000 Codes - US
handler = new LoincTop2000LabResultsUsHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Top 2000 Codes - SI
handler = new LoincTop2000LabResultsSiHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Universal Lab Order ValueSet
handler = new LoincUniversalOrderSetHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// IEEE Medical Device Codes
handler = new LoincIeeeMedicalDeviceCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, handler, ',', QuoteMode.NON_NUMERIC, false);
// Imaging Document Codes
handler = new LoincImagingDocumentCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_IMAGING_DOCUMENT_CODES_FILE, handler, ',', QuoteMode.NON_NUMERIC);
iterateOverZipFile(theDescriptors, LOINC_IMAGING_DOCUMENT_CODES_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Group File
handler = new LoincGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Group Terms File
handler = new LoincGroupTermsFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_GROUP_TERMS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
// Parent Group File
handler = new LoincParentGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
iterateOverZipFile(theDescriptors, LOINC_PARENT_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
IOUtils.closeQuietly(theDescriptors);
@ -332,18 +365,18 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
final Set<String> validConceptIds = new HashSet<>();
IRecordHandler handler = new SctHandlerConcept(validConceptIds);
iterateOverZipFile(theDescriptors, SCT_FILE_CONCEPT, handler, '\t', null);
iterateOverZipFile(theDescriptors, SCT_FILE_CONCEPT, handler, '\t', null, true);
ourLog.info("Have {} valid concept IDs", validConceptIds.size());
handler = new SctHandlerDescription(validConceptIds, code2concept, id2concept, codeSystemVersion);
iterateOverZipFile(theDescriptors, SCT_FILE_DESCRIPTION, handler, '\t', null);
iterateOverZipFile(theDescriptors, SCT_FILE_DESCRIPTION, handler, '\t', null, true);
ourLog.info("Got {} concepts, cloning map", code2concept.size());
final HashMap<String, TermConcept> rootConcepts = new HashMap<>(code2concept);
handler = new SctHandlerRelationship(codeSystemVersion, rootConcepts, code2concept);
iterateOverZipFile(theDescriptors, SCT_FILE_RELATIONSHIP, handler, '\t', null);
iterateOverZipFile(theDescriptors, SCT_FILE_RELATIONSHIP, handler, '\t', null, true);
IOUtils.closeQuietly(theDescriptors);


@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.term.loinc;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -26,6 +26,8 @@ import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ContactPoint;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ValueSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
@ -35,8 +37,9 @@ import java.util.Properties;
import static org.apache.commons.lang3.StringUtils.*;
public abstract class BaseLoincHandler implements IRecordHandler {
private static final Logger ourLog = LoggerFactory.getLogger(BaseLoincHandler.class);
public static final String LOINC_COPYRIGHT_STATEMENT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/";
/**
* This is <b>NOT</b> the LOINC CodeSystem URI! It is just
* the website URL to LOINC.
@ -52,8 +55,10 @@ public abstract class BaseLoincHandler implements IRecordHandler {
BaseLoincHandler(Map<String, TermConcept> theCode2Concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
myValueSets = theValueSets;
myValueSets.forEach(t -> myIdToValueSet.put(t.getId(), t));
myCode2Concept = theCode2Concept;
myConceptMaps = theConceptMaps;
myConceptMaps.forEach(t -> myIdToConceptMaps.put(t.getId(), t));
myUploadProperties = theUploadProperties;
}
@ -80,10 +85,9 @@ public abstract class BaseLoincHandler implements IRecordHandler {
String displayName = theDisplayName;
if (isBlank(displayName)) {
for (TermConcept next : myCode2Concept.values()) {
if (next.getCode().equals(theCode)) {
displayName = next.getDisplay();
}
TermConcept concept = myCode2Concept.get(theCode);
if (concept != null) {
displayName = concept.getDisplay();
}
}
@ -176,6 +180,8 @@ public abstract class BaseLoincHandler implements IRecordHandler {
.setCode(theMapping.getTargetCode())
.setDisplay(theMapping.getTargetDisplay())
.setEquivalence(theMapping.getEquivalence());
} else {
ourLog.info("Not going to add a mapping from [{}/{}] to [{}/{}] because one already exists", theMapping.getSourceCodeSystem(), theMapping.getSourceCode(), theMapping.getTargetCodeSystem(), theMapping.getTargetCode());
}
}
@ -192,7 +198,6 @@ public abstract class BaseLoincHandler implements IRecordHandler {
vs.setUrl(theValueSetUri);
vs.setId(theValueSetId);
vs.setVersion(version);
vs.setName(theValueSetName);
vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
vs.setPublisher(REGENSTRIEF_INSTITUTE_INC);
vs.addContact()
@ -206,6 +211,11 @@ public abstract class BaseLoincHandler implements IRecordHandler {
} else {
vs = myIdToValueSet.get(theValueSetId);
}
if (isBlank(vs.getName()) && isNotBlank(theValueSetName)) {
vs.setName(theValueSetName);
}
return vs;
}
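A hypothetical call sequence (ValueSet ID and name invented) showing why the name is now only filled when blank: the group files can reference a parent ValueSet before the file that names it has been processed:

	// Sketch only: the first touch registers the ValueSet unnamed; a later row
	// supplies the name, and an already-set name is never clobbered:
	ValueSet vs = getValueSet("LG100", "http://loinc.org/vs/LG100", null, null);
	getValueSet("LG100", "http://loinc.org/vs/LG100", "Hypothetical Parent Group", null);
	assert "Hypothetical Parent Group".equals(vs.getName());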


@ -0,0 +1,62 @@
package ca.uhn.fhir.jpa.term.loinc;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import static org.apache.commons.lang3.StringUtils.trim;
public class LoincGroupFileHandler extends BaseLoincHandler implements IRecordHandler {
public static final String VS_URI_PREFIX = "http://loinc.org/vs/";
public LoincGroupFileHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
}
@Override
public void accept(CSVRecord theRecord) {
//"ParentGroupId","GroupId","Group","Archetype","Status","VersionFirstReleased"
String parentGroupId = trim(theRecord.get("ParentGroupId"));
String groupId = trim(theRecord.get("GroupId"));
String groupName = trim(theRecord.get("Group"));
ValueSet parentValueSet = getValueSet(parentGroupId, VS_URI_PREFIX + parentGroupId, null, null);
parentValueSet
.getCompose()
.getIncludeFirstRep()
.addValueSet(VS_URI_PREFIX + groupId);
// Create group to set its name (terms are added in a different
// handler)
getValueSet(groupId, VS_URI_PREFIX + groupId, groupName, null);
}
}


@ -0,0 +1,53 @@
package ca.uhn.fhir.jpa.term.loinc;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import static org.apache.commons.lang3.StringUtils.trim;
public class LoincGroupTermsFileHandler extends BaseLoincHandler implements IRecordHandler {
public LoincGroupTermsFileHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
}
@Override
public void accept(CSVRecord theRecord) {
//"Category","GroupId","Archetype","LoincNumber","LongCommonName"
String groupId = trim(theRecord.get("GroupId"));
String loincNumber = trim(theRecord.get("LoincNumber"));
ValueSet valueSet = getValueSet(groupId, LoincGroupFileHandler.VS_URI_PREFIX + groupId, null, null);
addCodeAsIncludeToValueSet(valueSet, IHapiTerminologyLoaderSvc.LOINC_URI, loincNumber, null);
}
}


@ -37,7 +37,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincIeeeMedicalDeviceCodeHandler extends BaseLoincHandler implements IRecordHandler {
public static final String LOINC_IEEE_CM_ID = "LOINC-IEEE-MEDICAL-DEVICE-CM";
public static final String LOINC_IEEE_CM_URI = "http://loinc.org/fhir/loinc-ieee-device-code-mappings";
public static final String LOINC_IEEE_CM_URI = "http://loinc.org/cm/loinc-to-ieee-device-codes";
public static final String LOINC_IEEE_CM_NAME = "LOINC/IEEE Device Code Mappings";
private static final String CM_COPYRIGHT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/. The LOINC/IEEE Medical Device Code Mapping Table contains content from IEEE (http://ieee.org), copyright © 2017 IEEE.";


@ -36,7 +36,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincImagingDocumentCodeHandler extends BaseLoincHandler implements IRecordHandler {
public static final String VS_ID = "loinc-imaging-document-codes";
public static final String VS_URI = "http://loinc.org/fhir/loinc-imaging-document-codes";
public static final String VS_URI = "http://loinc.org/vs/loinc-imaging-document-codes";
public static final String VS_NAME = "LOINC Imaging Document Codes";
public LoincImagingDocumentCodeHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {

View File

@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.term.loinc;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IRecordHandler;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import static org.apache.commons.lang3.StringUtils.trim;
public class LoincParentGroupFileHandler extends BaseLoincHandler implements IRecordHandler {
public LoincParentGroupFileHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
}
@Override
public void accept(CSVRecord theRecord) {
// "ParentGroupId","ParentGroup","Status"
String parentGroupId = trim(theRecord.get("ParentGroupId"));
String parentGroupName = trim(theRecord.get("ParentGroup"));
getValueSet(parentGroupId, LoincGroupFileHandler.VS_URI_PREFIX + parentGroupId, parentGroupName, null);
}
}

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.term.loinc;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -40,13 +40,13 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme
public static final String LOINC_SCT_PART_MAP_ID = "loinc-parts-to-snomed-ct";
public static final String LOINC_SCT_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-snomed-ct";
static final String LOINC_RADLEX_PART_MAP_ID = "loinc-parts-to-radlex";
static final String LOINC_RADLEX_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-radlex";
static final String LOINC_RADLEX_PART_MAP_NAME = "LOINC Part Map to RADLEX";
private static final String LOINC_SCT_PART_MAP_NAME = "LOINC Part Map to SNOMED CT";
private static final String LOINC_RXNORM_PART_MAP_ID = "loinc-parts-to-rxnorm";
private static final String LOINC_RXNORM_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-rxnorm";
private static final String LOINC_RXNORM_PART_MAP_NAME = "LOINC Part Map to RxNORM";
private static final String LOINC_RADLEX_PART_MAP_ID = "loinc-parts-to-radlex";
private static final String LOINC_RADLEX_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-radlex";
private static final String LOINC_RADLEX_PART_MAP_NAME = "LOINC Part Map to RADLEX";
private static final String CM_COPYRIGHT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/. The LOINC Part File, LOINC/SNOMED CT Expression Association and Map Sets File, RELMA database and associated search index files include SNOMED Clinical Terms (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights are reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO. Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org. Under the terms of the Affiliate License, use of SNOMED CT in countries that are not IHTSDO Members is subject to reporting and fee payment obligations. However, IHTSDO agrees to waive the requirements to report and pay fees for use of SNOMED CT content included in the LOINC Part Mapping and LOINC Term Associations for purposes that support or enable more effective use of LOINC. This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.";
private static final String LOINC_PUBCHEM_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-pubchem";
private static final String LOINC_PUBCHEM_PART_MAP_ID = "loinc-parts-to-pubchem";

View File

@ -39,21 +39,19 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecor
public static final String RSNA_CODES_VS_ID = "loinc-rsna-radiology-playbook";
public static final String RSNA_CODES_VS_URI = "http://loinc.org/vs/loinc-rsna-radiology-playbook";
public static final String RSNA_CODES_VS_NAME = "LOINC/RSNA Radiology Playbook";
public static final String RID_MAPPING_CM_ID = "LOINC-TO-RID-CODES-CM";
public static final String RID_MAPPING_CM_URI = "http://loinc.org/rid-codes";
public static final String RID_MAPPING_CM_NAME = "RSNA Playbook RID Codes Mapping";
public static final String RADLEX_MAPPING_CM_ID = LoincPartRelatedCodeMappingHandler.LOINC_RADLEX_PART_MAP_ID;
public static final String RADLEX_MAPPING_CM_URI = LoincPartRelatedCodeMappingHandler.LOINC_RADLEX_PART_MAP_URI;
public static final String RADLEX_MAPPING_CM_NAME = LoincPartRelatedCodeMappingHandler.LOINC_RADLEX_PART_MAP_NAME;
public static final String RID_CS_URI = "http://www.radlex.org";
public static final String RPID_MAPPING_CM_ID = "LOINC-TO-RPID-CODES-CM";
public static final String RPID_MAPPING_CM_URI = "http://loinc.org/rpid-codes";
public static final String RPID_MAPPING_CM_NAME = "RSNA Playbook RPID Codes Mapping";
/*
* About these being the same - Per Dan:
* About these being the same - Per Dan Vreeman:
* We had some discussion about this, and both
* RIDs (RadLex clinical terms) and RPIDs (Radlex Playbook Ids)
* belong to the same "code system" since they will never collide.
* The codesystem uri is "http://www.radlex.org". FYI, that's
* now listed on the FHIR page:
* https://www.hl7.org/fhir/terminologies-systems.html
* -ja
*/
public static final String RPID_CS_URI = RID_CS_URI;
private static final String CM_COPYRIGHT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/. The LOINC/RSNA Radiology Playbook and the LOINC Part File contain content from RadLex® (http://rsna.org/RadLex.aspx), copyright © 2005-2017, The Radiological Society of North America, Inc., available at no cost under the license at http://www.rsna.org/uploadedFiles/RSNA/Content/Informatics/RadLex_License_Agreement_and_Terms_of_Use_V2_Final.pdf.";
@ -179,9 +177,9 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecor
if (isNotBlank(rid)) {
addConceptMapEntry(
new ConceptMapping()
.setConceptMapId(RID_MAPPING_CM_ID)
.setConceptMapUri(RID_MAPPING_CM_URI)
.setConceptMapName(RID_MAPPING_CM_NAME)
.setConceptMapId(RADLEX_MAPPING_CM_ID)
.setConceptMapUri(RADLEX_MAPPING_CM_URI)
.setConceptMapName(RADLEX_MAPPING_CM_NAME)
.setSourceCodeSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
.setSourceCode(partNumber)
.setSourceDisplay(partName)
@ -196,9 +194,9 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecor
if (isNotBlank(rpid)) {
addConceptMapEntry(
new ConceptMapping()
.setConceptMapId(RPID_MAPPING_CM_ID)
.setConceptMapUri(RPID_MAPPING_CM_URI)
.setConceptMapName(RPID_MAPPING_CM_NAME)
.setConceptMapId(RADLEX_MAPPING_CM_ID)
.setConceptMapUri(RADLEX_MAPPING_CM_URI)
.setConceptMapName(RADLEX_MAPPING_CM_NAME)
.setSourceCodeSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
.setSourceCode(loincNumber)
.setSourceDisplay(longCommonName)

View File

@ -34,7 +34,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincUniversalOrderSetHandler extends BaseLoincHandler implements IRecordHandler {
public static final String VS_ID = "loinc-universal-order-set-vs";
public static final String VS_URI = "http://loinc.org/fhir/loinc-universal-order-set";
public static final String VS_URI = "http://loinc.org/vs/loinc-universal-order-set";
public static final String VS_NAME = "LOINC Universal Order Set";
public LoincUniversalOrderSetHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {

View File

@ -83,14 +83,19 @@ public class ReindexController implements IReindexController {
break;
}
}
} catch (Exception e) {
ourLog.error("Failure during reindex", e);
count = -1;
} finally {
myReindexingLock.release();
}
synchronized (this) {
if (count == null) {
ourLog.info("Reindex pass complete, no remaining resource to index");
myDontReindexUntil = System.currentTimeMillis() + DateUtils.MILLIS_PER_HOUR;
} else {
ourLog.info("Reindex pass complete, {} remaining resource to index", count);
myDontReindexUntil = null;
}
}
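The synchronized block above implements a simple backoff: when a pass finds nothing left to reindex, myDontReindexUntil pushes the next pass out by an hour, and any remaining work clears it. A hedged sketch of the matching entry-side guard (the field name comes from the code above; the surrounding method and log text are assumed):

// Sketch of the check implied by the backoff above; not the verbatim committed code.
synchronized (this) {
   if (myDontReindexUntil != null && myDontReindexUntil > System.currentTimeMillis()) {
      ourLog.info("Skipping reindex pass, the previous pass found no work to do");
      return;
   }
}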

View File

@ -24,7 +24,7 @@ import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.hl7.fhir.r4.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.r4.hapi.ctx.ValidationSupportChain;
import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

View File

@ -107,7 +107,7 @@ public class TestR4Config extends BaseJavaConfigR4 {
DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
.countQuery(new ThreadQueryCountHolder())
.build();

View File

@ -331,7 +331,6 @@ public abstract class BaseJpaTest {
theSystemDao.expunge(new ExpungeOptions().setExpungeEverything(true));
theDaoConfig.setExpungeEnabled(expungeEnabled);
theSearchParamPresenceSvc.flushCachesForUnitTest();
theSearchParamRegistry.forceRefresh();
}

View File

@ -24,17 +24,33 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
private IIdType myConceptMapId;
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
@Before
@Transactional
public void before02() {
myConceptMapId = myConceptMapDao.create(createConceptMap(), mySrd).getId().toUnqualifiedVersionless();
}
@Test
public void testDeleteConceptMap() {
myConceptMapDao.delete(myConceptMapId);
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL)
.setCode("12345");
translationRequest.setTargetSystem(new UriType(CS_URL_3));
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertFalse(translationResult.getResult().booleanValue());
}
});
}
@Test
public void testTranslateByCodeSystemsAndSourceCodeOneToMany() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@ -81,4 +97,9 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
}
});
}
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
}

View File

@ -7,6 +7,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
import org.hl7.fhir.dstu3.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
@ -81,7 +82,8 @@ public class SearchParamExtractorDstu3Test {
}
};
SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(ourCtx, ourValidationSupport, searchParamRegistry);
SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(new DaoConfig(), ourCtx, ourValidationSupport, searchParamRegistry);
extractor.start();
Set<BaseResourceIndexedSearchParam> tokens = extractor.extractSearchParamTokens(new ResourceTable(), obs);
assertEquals(1, tokens.size());
ResourceIndexedSearchParamToken token = (ResourceIndexedSearchParamToken) tokens.iterator().next();

View File

@ -66,8 +66,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Autowired
protected IResourceLinkDao myResourceLinkDao;
@Autowired
protected ISearchParamDao mySearchParamDao;
@Autowired
protected ISearchParamPresentDao mySearchParamPresentDao;
@Autowired
protected IResourceIndexedSearchParamStringDao myResourceIndexedSearchParamStringDao;
@ -170,6 +168,12 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Qualifier("myPatientDaoR4")
protected IFhirResourceDaoPatient<Patient> myPatientDao;
@Autowired
protected IResourceTableDao myResourceTableDao;
@Autowired
protected IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
protected IForcedIdDao myForcedIdDao;
@Autowired
@Qualifier("myCoverageDaoR4")
protected IFhirResourceDao<Coverage> myCoverageDao;
@Autowired
@ -188,10 +192,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Qualifier("myResourceProvidersR4")
protected Object myResourceProviders;
@Autowired
protected IResourceTableDao myResourceTableDao;
@Autowired
protected IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
protected IResourceTagDao myResourceTagDao;
@Autowired
protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
@ -257,6 +257,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
myDaoConfig.setExpireSearchResultsAfterMillis(new DaoConfig().getExpireSearchResultsAfterMillis());
myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
myDaoConfig.setSuppressUpdatesWithNoChange(new DaoConfig().isSuppressUpdatesWithNoChange());
myDaoConfig.setAllowContainsSearches(new DaoConfig().isAllowContainsSearches());
}
@After
@ -283,7 +284,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Before
public void beforeFlushFT() {
runInTransaction(()->{
runInTransaction(() -> {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.purgeAll(ResourceIndexedSearchParamString.class);
@ -314,6 +315,11 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
return myFhirCtx;
}
@Override
protected PlatformTransactionManager getTxManager() {
return myTxManager;
}
protected <T extends IBaseResource> T loadResourceFromClasspath(Class<T> type, String resourceName) throws IOException {
InputStream stream = FhirResourceDaoDstu2SearchNoFtTest.class.getResourceAsStream(resourceName);
if (stream == null) {
@ -324,11 +330,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
return newJsonParser.parseResource(type, string);
}
@Override
protected PlatformTransactionManager getTxManager() {
return myTxManager;
}
@AfterClass
public static void afterClassClearContextBaseJpaR4Test() throws Exception {
ourValueSetDao.purgeCaches();

View File

@ -26,17 +26,33 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {
private IIdType myConceptMapId;
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
@Before
@Transactional
public void before02() {
myConceptMapId = myConceptMapDao.create(createConceptMap(), mySrd).getId().toUnqualifiedVersionless();
}
@Test
public void testDeleteConceptMap() {
myConceptMapDao.delete(myConceptMapId);
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL)
.setCode("12345");
translationRequest.setTargetSystem(new UriType(CS_URL_3));
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertFalse(translationResult.getResult().booleanValue());
}
});
}
@Test
public void testTranslateByCodeSystemsAndSourceCodeOneToMany() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@ -205,6 +221,98 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {
});
}
@Test
public void testTranslateUsingPredicatesWithSourceAndTargetSystem2() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
/*
* Provided:
* source code
* source code system
* target code system #2
*/
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL)
.setCode("12345");
translationRequest.setTargetSystem(new UriType(CS_URL_2));
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertTrue(translationResult.getResult().booleanValue());
assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
assertEquals(1, translationResult.getMatches().size());
TranslationMatch translationMatch = translationResult.getMatches().get(0);
assertEquals(ConceptMapEquivalence.EQUAL.toCode(), translationMatch.getEquivalence().getCode());
Coding concept = translationMatch.getConcept();
assertEquals("34567", concept.getCode());
assertEquals("Target Code 34567", concept.getDisplay());
assertEquals(CS_URL_2, concept.getSystem());
assertEquals("Version 2", concept.getVersion());
assertFalse(concept.getUserSelected());
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
}
});
}
@Test
public void testTranslateUsingPredicatesWithSourceAndTargetSystem3() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
/*
* Provided:
* source code
* source code system
* target code system #3
*/
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL)
.setCode("12345");
translationRequest.setTargetSystem(new UriType(CS_URL_3));
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertTrue(translationResult.getResult().booleanValue());
assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
assertEquals(2, translationResult.getMatches().size());
TranslationMatch translationMatch = translationResult.getMatches().get(0);
assertEquals(ConceptMapEquivalence.EQUAL.toCode(), translationMatch.getEquivalence().getCode());
Coding concept = translationMatch.getConcept();
assertEquals("56789", concept.getCode());
assertEquals("Target Code 56789", concept.getDisplay());
assertEquals(CS_URL_3, concept.getSystem());
assertEquals("Version 4", concept.getVersion());
assertFalse(concept.getUserSelected());
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
translationMatch = translationResult.getMatches().get(1);
assertEquals(ConceptMapEquivalence.WIDER.toCode(), translationMatch.getEquivalence().getCode());
concept = translationMatch.getConcept();
assertEquals("67890", concept.getCode());
assertEquals("Target Code 67890", concept.getDisplay());
assertEquals(CS_URL_3, concept.getSystem());
assertEquals("Version 4", concept.getVersion());
assertFalse(concept.getUserSelected());
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
}
});
}
@Test
public void testTranslateUsingPredicatesWithSourceSystem() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@ -356,98 +464,6 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {
});
}
@Test
public void testTranslateUsingPredicatesWithSourceAndTargetSystem2() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
/*
* Provided:
* source code
* source code system
* target code system #2
*/
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL)
.setCode("12345");
translationRequest.setTargetSystem(new UriType(CS_URL_2));
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertTrue(translationResult.getResult().booleanValue());
assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
assertEquals(1, translationResult.getMatches().size());
TranslationMatch translationMatch = translationResult.getMatches().get(0);
assertEquals(ConceptMapEquivalence.EQUAL.toCode(), translationMatch.getEquivalence().getCode());
Coding concept = translationMatch.getConcept();
assertEquals("34567", concept.getCode());
assertEquals("Target Code 34567", concept.getDisplay());
assertEquals(CS_URL_2, concept.getSystem());
assertEquals("Version 2", concept.getVersion());
assertFalse(concept.getUserSelected());
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
}
});
}
@Test
public void testTranslateUsingPredicatesWithSourceAndTargetSystem3() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
/*
* Provided:
* source code
* source code system
* target code system #3
*/
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL)
.setCode("12345");
translationRequest.setTargetSystem(new UriType(CS_URL_3));
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertTrue(translationResult.getResult().booleanValue());
assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
assertEquals(2, translationResult.getMatches().size());
TranslationMatch translationMatch = translationResult.getMatches().get(0);
assertEquals(ConceptMapEquivalence.EQUAL.toCode(), translationMatch.getEquivalence().getCode());
Coding concept = translationMatch.getConcept();
assertEquals("56789", concept.getCode());
assertEquals("Target Code 56789", concept.getDisplay());
assertEquals(CS_URL_3, concept.getSystem());
assertEquals("Version 4", concept.getVersion());
assertFalse(concept.getUserSelected());
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
translationMatch = translationResult.getMatches().get(1);
assertEquals(ConceptMapEquivalence.WIDER.toCode(), translationMatch.getEquivalence().getCode());
concept = translationMatch.getConcept();
assertEquals("67890", concept.getCode());
assertEquals("Target Code 67890", concept.getDisplay());
assertEquals(CS_URL_3, concept.getSystem());
assertEquals("Version 4", concept.getVersion());
assertFalse(concept.getUserSelected());
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
}
});
}
@Test
public void testTranslateUsingPredicatesWithSourceValueSet() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@ -686,6 +702,92 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {
});
}
@Test
public void testTranslateWithReverseUsingPredicatesWithSourceAndTargetSystem1() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
/*
* Provided:
* source code
* source code system
* target code system #1
* reverse = true
*/
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL_2)
.setCode("34567");
translationRequest.setTargetSystem(new UriType(CS_URL));
translationRequest.setReverse(true);
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertTrue(translationResult.getResult().booleanValue());
assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
assertEquals(1, translationResult.getMatches().size());
TranslationMatch translationMatch = translationResult.getMatches().get(0);
assertNull(translationMatch.getEquivalence());
Coding concept = translationMatch.getConcept();
assertEquals("12345", concept.getCode());
assertEquals("Source Code 12345", concept.getDisplay());
assertEquals(CS_URL, concept.getSystem());
assertEquals("Version 1", concept.getVersion());
assertFalse(concept.getUserSelected());
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
}
});
}
@Test
public void testTranslateWithReverseUsingPredicatesWithSourceAndTargetSystem4() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
/*
* Provided:
* source code
* source code system
* target code system #4
* reverse = true
*/
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL_2)
.setCode("34567");
translationRequest.setTargetSystem(new UriType(CS_URL_4));
translationRequest.setReverse(true);
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertTrue(translationResult.getResult().booleanValue());
assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
assertEquals(1, translationResult.getMatches().size());
TranslationMatch translationMatch = translationResult.getMatches().get(0);
assertNull(translationMatch.getEquivalence());
Coding concept = translationMatch.getConcept();
assertEquals("78901", concept.getCode());
assertEquals("Source Code 78901", concept.getDisplay());
assertEquals(CS_URL_4, concept.getSystem());
assertEquals("Version 5", concept.getVersion());
assertFalse(concept.getUserSelected());
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
}
});
}
@Test
public void testTranslateWithReverseUsingPredicatesWithSourceSystem() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@ -790,92 +892,6 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {
});
}
@Test
public void testTranslateWithReverseUsingPredicatesWithSourceAndTargetSystem1() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
/*
* Provided:
* source code
* source code system
* target code system #1
* reverse = true
*/
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL_2)
.setCode("34567");
translationRequest.setTargetSystem(new UriType(CS_URL));
translationRequest.setReverse(true);
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertTrue(translationResult.getResult().booleanValue());
assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
assertEquals(1, translationResult.getMatches().size());
TranslationMatch translationMatch = translationResult.getMatches().get(0);
assertNull(translationMatch.getEquivalence());
Coding concept = translationMatch.getConcept();
assertEquals("12345", concept.getCode());
assertEquals("Source Code 12345", concept.getDisplay());
assertEquals(CS_URL, concept.getSystem());
assertEquals("Version 1", concept.getVersion());
assertFalse(concept.getUserSelected());
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
}
});
}
@Test
public void testTranslateWithReverseUsingPredicatesWithSourceAndTargetSystem4() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
/*
* Provided:
* source code
* source code system
* target code system #4
* reverse = true
*/
TranslationRequest translationRequest = new TranslationRequest();
translationRequest.getCodeableConcept().addCoding()
.setSystem(CS_URL_2)
.setCode("34567");
translationRequest.setTargetSystem(new UriType(CS_URL_4));
translationRequest.setReverse(true);
TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
assertTrue(translationResult.getResult().booleanValue());
assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
assertEquals(1, translationResult.getMatches().size());
TranslationMatch translationMatch = translationResult.getMatches().get(0);
assertNull(translationMatch.getEquivalence());
Coding concept = translationMatch.getConcept();
assertEquals("78901", concept.getCode());
assertEquals("Source Code 78901", concept.getDisplay());
assertEquals(CS_URL_4, concept.getSystem());
assertEquals("Version 5", concept.getVersion());
assertFalse(concept.getUserSelected());
assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
}
});
}
@Test
public void testTranslateWithReverseUsingPredicatesWithSourceValueSet() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@ -977,4 +993,9 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {
}
});
}
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
}

View File

@ -46,7 +46,6 @@ public class FhirResourceDaoR4SearchMissingTest extends BaseJpaR4Test {
org.setActive(true);
myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
assertThat(mySearchParamDao.findAll(), empty());
assertThat(mySearchParamPresentDao.findAll(), empty());
assertThat(myResourceIndexedSearchParamStringDao.findAll(), empty());
assertThat(myResourceIndexedSearchParamDateDao.findAll(), empty());

View File

@ -485,11 +485,18 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
Class<ResourceIndexedSearchParamNumber> type = ResourceIndexedSearchParamNumber.class;
List<ResourceIndexedSearchParamNumber> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
ourLog.info(toStringMultiline(results));
assertThat(results, containsInAnyOrder(
((ResourceIndexedSearchParamNumber) (new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_SEQUENCE, null).setResource(resource).setMissing(true))),
((ResourceIndexedSearchParamNumber) (new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("1.00")).setResource(resource))),
((ResourceIndexedSearchParamNumber) (new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("2.00")).setResource(resource)))
));
// calculateHashes() populates the new hash columns so these expected values
// compare equal to the persisted index rows
ResourceIndexedSearchParamNumber expect0 = new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("2.00"));
expect0.setResource(resource);
expect0.calculateHashes();
ResourceIndexedSearchParamNumber expect1 = new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_SEQUENCE, null);
expect1.setResource(resource).setMissing(true);
expect1.calculateHashes();
ResourceIndexedSearchParamNumber expect2 = new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("1.00"));
expect2.setResource(resource);
expect2.calculateHashes();
assertThat(results, containsInAnyOrder(expect0, expect1, expect2));
}
});
}
@ -504,10 +511,12 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
IIdType id = mySubstanceDao.create(res, mySrd).getId().toUnqualifiedVersionless();
Class<ResourceIndexedSearchParamQuantity> type = ResourceIndexedSearchParamQuantity.class;
List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
ourLog.info(toStringMultiline(results));
assertEquals(2, results.size());
runInTransaction(()->{
Class<ResourceIndexedSearchParamQuantity> type = ResourceIndexedSearchParamQuantity.class;
List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
ourLog.info(toStringMultiline(results));
assertEquals(2, results.size());
});
List<IIdType> actual = toUnqualifiedVersionlessIds(
mySubstanceDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Substance.SP_QUANTITY, new QuantityParam(null, 123, "http://foo", "UNIT"))));
@ -2261,6 +2270,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
@Test
public void testSearchWithContains() {
myDaoConfig.setAllowContainsSearches(true);
Patient pt1 = new Patient();
pt1.addName().setFamily("ABCDEFGHIJK");

View File

@ -679,6 +679,8 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
ourLog.info("Now have {} inserts", QueryCountHolder.getGrandTotal().getInsert());
QueryCountHolder.clear();
ourLog.info("** About to update");
pt.setId(id);
pt.getNameFirstRep().addGiven("GIVEN1C");
myPatientDao.update(pt);

View File

@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.ISearchParamRegistry;
import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
@ -80,7 +81,7 @@ public class SearchParamExtractorR4Test {
}
};
SearchParamExtractorR4 extractor = new SearchParamExtractorR4(ourCtx, ourValidationSupport, searchParamRegistry);
SearchParamExtractorR4 extractor = new SearchParamExtractorR4(new DaoConfig(), ourCtx, ourValidationSupport, searchParamRegistry);
Set<BaseResourceIndexedSearchParam> tokens = extractor.extractSearchParamTokens(new ResourceTable(), obs);
assertEquals(1, tokens.size());
ResourceIndexedSearchParamToken token = (ResourceIndexedSearchParamToken) tokens.iterator().next();

View File

@ -19,19 +19,8 @@ public class ResourceIndexedSearchParamQuantityTest {
ResourceIndexedSearchParamQuantity token = createParam("NAME", "123.001", "value", "VALUE");
// Make sure our hashing function gives consistent results
assertEquals(945335027461836896L, token.getHashUnitsAndValPrefix().longValue());
assertEquals(5549105497508660145L, token.getHashValPrefix().longValue());
}
@Test
public void testValueTrimming() {
assertEquals(7265149425397186226L, createParam("NAME", "401.001", "value", "VALUE").getHashUnitsAndValPrefix().longValue());
assertEquals(7265149425397186226L, createParam("NAME", "401.99999", "value", "VALUE").getHashUnitsAndValPrefix().longValue());
assertEquals(7265149425397186226L, createParam("NAME", "401", "value", "VALUE").getHashUnitsAndValPrefix().longValue());
// Should be different
assertEquals(-8387917096585386046L, createParam("NAME", "400.9999999", "value", "VALUE").getHashUnitsAndValPrefix().longValue());
// Should be different
assertEquals(8819656626732693650L, createParam("NAME", "402.000000", "value", "VALUE").getHashUnitsAndValPrefix().longValue());
assertEquals(834432764963581074L, token.getHashIdentity().longValue());
assertEquals(-1970227166134682431L, token.getHashIdentityAndUnits().longValue());
}
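The replaced assertions reflect this commit's move to identity-based hash columns (hashIdentity, hashIdentityAndUnits): a single long computed over the identifying fields lets one indexed column stand in for a multi-column lookup. A rough sketch of the idea, assuming a Guava murmur3-style hash; this is not HAPI's exact implementation, and the delimiter and inputs are illustrative:

// Conceptual sketch only; the real hashing lives in the entity classes and may
// differ in algorithm, delimiters, and inputs.
long hashIdentity = Hashing.murmur3_128()
   .hashUnencodedChars("Patient" + "|" + "NAME")
   .asLong();
long hashIdentityAndUnits = Hashing.murmur3_128()
   .hashUnencodedChars("Patient" + "|" + "NAME" + "|" + "http://unitsofmeasure.org" + "|" + "mg")
   .asLong();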

View File

@ -1,14 +1,16 @@
package ca.uhn.fhir.jpa.entity;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import org.junit.Test;
import static org.junit.Assert.*;
@SuppressWarnings("SpellCheckingInspection")
public class ResourceIndexedSearchParamStringTest {
@Test
public void testHashFunctions() {
ResourceIndexedSearchParamString token = new ResourceIndexedSearchParamString("NAME", "value", "VALUE");
ResourceIndexedSearchParamString token = new ResourceIndexedSearchParamString(new DaoConfig(), "NAME", "value", "VALUE");
token.setResource(new ResourceTable().setResourceType("Patient"));
// Make sure our hashing function gives consistent results
@ -18,7 +20,7 @@ public class ResourceIndexedSearchParamStringTest {
@Test
public void testHashFunctionsPrefixOnly() {
ResourceIndexedSearchParamString token = new ResourceIndexedSearchParamString("NAME", "vZZZZZZZZZZZZZZZZ", "VZZZZZZzzzZzzzZ");
ResourceIndexedSearchParamString token = new ResourceIndexedSearchParamString(new DaoConfig(), "NAME", "vZZZZZZZZZZZZZZZZ", "VZZZZZZzzzZzzzZ");
token.setResource(new ResourceTable().setResourceType("Patient"));
// Should be the same as in testHashFunctions()

View File

@ -3159,16 +3159,13 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
testSearchWithEmptyParameter("/Observation?code=bar&value-concept=");
}
private void testSearchWithEmptyParameter(String url) throws IOException {
HttpGet get = new HttpGet(ourServerBase + url);
CloseableHttpResponse resp = ourHttpClient.execute(get);
try {
private void testSearchWithEmptyParameter(String theUrl) throws IOException {
HttpGet get = new HttpGet(ourServerBase + theUrl);
try (CloseableHttpResponse resp = ourHttpClient.execute(get)) {
assertEquals(200, resp.getStatusLine().getStatusCode());
String respString = IOUtils.toString(resp.getEntity().getContent(), Constants.CHARSET_UTF8);
Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, respString);
assertEquals(1, bundle.getEntry().size());
} finally {
IOUtils.closeQuietly(resp.getEntity().getContent());
}
}

View File

@ -1,10 +1,15 @@
package ca.uhn.fhir.jpa.provider.dstu3;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
import ca.uhn.fhir.jpa.term.ZipCollectionBuilder;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.dstu3.model.*;
import org.junit.AfterClass;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.io.File;
@ -15,34 +20,45 @@ import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.dstu3.model.Attachment;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.dstu3.model.UriType;
import org.junit.AfterClass;
import org.junit.Test;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.TestUtil;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.*;
public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProviderDstu3Test.class);
private byte[] createLoincZip() throws IOException {
private static void addFile(ZipOutputStream theZos, String theFileName) throws IOException {
theZos.putNextEntry(new ZipEntry(theFileName));
theZos.write(IOUtils.toByteArray(TerminologyUploaderProviderDstu3Test.class.getResourceAsStream("/loinc/" + theFileName)));
}
public static byte[] createLoincZip() throws IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(bos);
zos.putNextEntry(new ZipEntry("Loinc.csv"));
zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/loinc/Loinc.csv")));
zos.putNextEntry(new ZipEntry("MultiAxialHierarchy.csv"));
zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/loinc/MultiAxialHierarchy.csv")));
addFile(zos, "loincupload.properties");
addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_GROUP_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_GROUP_TERMS_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_PARENT_GROUP_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_RELATED_CODE_MAPPING_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_DOCUMENT_ONTOLOGY_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_RSNA_PLAYBOOK_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_IMAGING_DOCUMENT_CODES_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE);
addFile(zos, TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE);
zos.close();
byte[] packageBytes = bos.toByteArray();
return packageBytes;
}
@ -51,7 +67,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(bos);
List<String> inputNames = Arrays.asList("sct2_Concept_Full_INT_20160131.txt","sct2_Concept_Full-en_INT_20160131.txt","sct2_Description_Full-en_INT_20160131.txt","sct2_Identifier_Full_INT_20160131.txt","sct2_Relationship_Full_INT_20160131.txt","sct2_StatedRelationship_Full_INT_20160131.txt","sct2_TextDefinition_Full-en_INT_20160131.txt");
List<String> inputNames = Arrays.asList("sct2_Concept_Full_INT_20160131.txt", "sct2_Concept_Full-en_INT_20160131.txt", "sct2_Description_Full-en_INT_20160131.txt", "sct2_Identifier_Full_INT_20160131.txt", "sct2_Relationship_Full_INT_20160131.txt", "sct2_StatedRelationship_Full_INT_20160131.txt", "sct2_TextDefinition_Full-en_INT_20160131.txt");
for (String nextName : inputNames) {
zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + nextName));
byte[] b = IOUtils.toByteArray(getClass().getResourceAsStream("/sct/" + nextName));
@ -95,7 +111,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam);
ourLog.info(resp);
assertThat(((IntegerType)respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
assertThat(((IntegerType) respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
/*
* Try uploading a second time
@ -150,7 +166,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
}
@Test
public void testUploadPackageMissingUrl() throws Exception {
public void testUploadPackageMissingUrl() {
try {
ourClient
.operation()
@ -179,7 +195,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam);
ourLog.info(resp);
assertThat(((IntegerType)respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
assertThat(((IntegerType) respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
}
@Test
@ -205,7 +221,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam);
ourLog.info(resp);
assertThat(((IntegerType)respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
assertThat(((IntegerType) respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
}
@AfterClass

View File

@ -2,19 +2,27 @@ package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.TestUtil;
import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.*;
public class ExpungeR4Test extends BaseResourceProviderR4Test {
@ -58,11 +66,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
getDao(theId).read(theId);
}
@Override
@Before
public void before() throws Exception {
super.before();
public void createStandardPatients() {
Patient p = new Patient();
p.setId("PT-ONEVERSION");
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
@ -105,7 +110,6 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
o.setStatus(Observation.ObservationStatus.FINAL);
myDeletedObservationId = myObservationDao.create(o).getId();
myDeletedObservationId = myObservationDao.delete(myDeletedObservationId).getId();
}
private IFhirResourceDao<?> getDao(IIdType theId) {
@ -126,6 +130,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
@Test
public void testExpungeInstanceOldVersionsAndDeleted() {
createStandardPatients();
Patient p = new Patient();
p.setId("PT-TWOVERSION");
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
@ -151,8 +157,35 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
assertGone(myDeletedObservationId);
}
@Test
public void testExpungeAllVersionsDeletesRow() {
// Create then delete
Patient p = new Patient();
p.setId("TEST");
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
p.setActive(true);
p.addName().setFamily("FOO");
myPatientDao.update(p).getId();
myPatientDao.delete(new IdType("Patient/TEST"));
runInTransaction(()-> assertThat(myResourceTableDao.findAll(), not(empty())));
runInTransaction(()-> assertThat(myResourceHistoryTableDao.findAll(), not(empty())));
runInTransaction(()-> assertThat(myForcedIdDao.findAll(), not(empty())));
myPatientDao.expunge(new ExpungeOptions()
.setExpungeDeletedResources(true)
.setExpungeOldVersions(true));
runInTransaction(()-> assertThat(myResourceTableDao.findAll(), empty()));
runInTransaction(()-> assertThat(myResourceHistoryTableDao.findAll(), empty()));
runInTransaction(()-> assertThat(myForcedIdDao.findAll(), empty()));
}
@Test
public void testExpungeInstanceVersionCurrentVersion() {
createStandardPatients();
try {
myPatientDao.expunge(myTwoVersionPatientId.withVersion("2"), new ExpungeOptions()
@ -166,6 +199,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
@Test
public void testExpungeInstanceVersionOldVersionsAndDeleted() {
createStandardPatients();
Patient p = new Patient();
p.setId("PT-TWOVERSION");
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
@ -193,6 +228,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
@Test
public void testExpungeSystemOldVersionsAndDeleted() {
createStandardPatients();
mySystemDao.expunge(new ExpungeOptions()
.setExpungeDeletedResources(true)
.setExpungeOldVersions(true));
@ -212,6 +249,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
@Test
public void testExpungeTypeDeletedResources() {
createStandardPatients();
myPatientDao.expunge(new ExpungeOptions()
.setExpungeDeletedResources(true)
.setExpungeOldVersions(false));
@ -231,6 +270,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
@Test
public void testExpungeTypeOldVersions() {
createStandardPatients();
myPatientDao.expunge(new ExpungeOptions()
.setExpungeDeletedResources(false)
.setExpungeOldVersions(true));
@ -251,6 +292,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
@Test
public void testExpungeSystemEverything() {
createStandardPatients();
mySystemDao.expunge(new ExpungeOptions()
.setExpungeEverything(true));
@ -270,6 +313,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
@Test
public void testExpungeTypeOldVersionsAndDeleted() {
createStandardPatients();
myPatientDao.expunge(new ExpungeOptions()
.setExpungeDeletedResources(true)
.setExpungeOldVersions(true));

View File

@ -3571,14 +3571,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
private void testSearchWithEmptyParameter(String url) throws IOException {
HttpGet get = new HttpGet(ourServerBase + url);
CloseableHttpResponse resp = ourHttpClient.execute(get);
try {
try (CloseableHttpResponse resp = ourHttpClient.execute(get)) {
assertEquals(200, resp.getStatusLine().getStatusCode());
String respString = IOUtils.toString(resp.getEntity().getContent(), Constants.CHARSET_UTF8);
Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, respString);
assertEquals(1, bundle.getEntry().size());
} finally {
IOUtils.closeQuietly(resp.getEntity().getContent());
}
}

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3Test;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.TestUtil;
@ -25,19 +26,6 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProviderR4Test.class);
private byte[] createLoincZip() throws IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(bos);
zos.putNextEntry(new ZipEntry("Loinc.csv"));
zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/loinc/Loinc.csv")));
zos.putNextEntry(new ZipEntry("MultiAxialHierarchy.csv"));
zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/loinc/MultiAxialHierarchy.csv")));
zos.close();
byte[] packageBytes = bos.toByteArray();
return packageBytes;
}
private byte[] createSctZip() throws IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
@ -75,7 +63,7 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes
@Test
public void testUploadLoinc() throws Exception {
byte[] packageBytes = createLoincZip();
byte[] packageBytes = TerminologyUploaderProviderDstu3Test.createLoincZip();
//@formatter:off
Parameters respParam = myClient

View File

@ -0,0 +1,69 @@
package ca.uhn.fhir.jpa.stresstest;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.ISearchParamRegistry;
import ca.uhn.fhir.jpa.dao.dstu3.SearchParamExtractorDstu3;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.dstu3.model.Patient;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;
public class IndexStressTest {
private static final Logger ourLog = LoggerFactory.getLogger(IndexStressTest.class);
@Test
public void testExtractSearchParams() {
Patient p = new Patient();
p.addName().setFamily("FOO").addGiven("BAR").addGiven("BAR");
p.getMaritalStatus().setText("DDDDD");
p.addAddress().addLine("A").addLine("B").addLine("C");
DaoConfig daoConfig = new DaoConfig();
FhirContext ctx = FhirContext.forDstu3();
IValidationSupport mockValidationSupport = mock(IValidationSupport.class);
IValidationSupport validationSupport = new CachingValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), mockValidationSupport));
ISearchParamRegistry searchParamRegistry = mock(ISearchParamRegistry.class);
SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(daoConfig, ctx, validationSupport, searchParamRegistry);
extractor.start();
Map<String, RuntimeSearchParam> spMap = ctx
.getResourceDefinition("Patient")
.getSearchParams()
.stream()
.collect(Collectors.toMap(RuntimeSearchParam::getName, t -> t));
when(searchParamRegistry.getActiveSearchParams(eq("Patient"))).thenReturn(spMap);
ResourceTable entity = new ResourceTable();
Set<ResourceIndexedSearchParamString> params = extractor.extractSearchParamStrings(entity, p);
StopWatch sw = new StopWatch();
int loops = 100;
for (int i = 0; i < loops; i++) {
entity = new ResourceTable();
params = extractor.extractSearchParamStrings(entity, p);
}
ourLog.info("Indexed {} times in {}ms/time", loops, sw.getMillisPerOperation(loops));
assertEquals(9, params.size());
// the CachingValidationSupport wrapper should ensure the underlying support is
// hit only once, despite the repeated extractions above
verify(mockValidationSupport, times(1)).fetchAllStructureDefinitions(any(FhirContext.class));
}
}

View File

@ -48,7 +48,6 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
myRequestValidatingInterceptor.addValidatorModule(module);
}
@Test
public void testMultithreadedSearch() throws Exception {
Bundle input = new Bundle();

View File

@ -67,7 +67,6 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {
public void testExpandWithPropertyCoding() throws Exception {
ZipCollectionBuilder files = new ZipCollectionBuilder();
TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files);
TerminologyLoaderSvcLoincTest.addLoincOptionalFilesToZip(files);
myLoader.loadLoinc(files.getFiles(), mySrd);
// Search by code
@ -121,7 +120,6 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {
public void testExpandWithPropertyString() throws Exception {
ZipCollectionBuilder files = new ZipCollectionBuilder();
TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files);
TerminologyLoaderSvcLoincTest.addLoincOptionalFilesToZip(files);
myLoader.loadLoinc(files.getFiles(), mySrd);
ValueSet input = new ValueSet();
@ -144,7 +142,6 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {
public void testLookupWithProperties() throws Exception {
ZipCollectionBuilder files = new ZipCollectionBuilder();
TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files);
TerminologyLoaderSvcLoincTest.addLoincOptionalFilesToZip(files);
myLoader.loadLoinc(files.getFiles(), mySrd);
IFhirResourceDaoCodeSystem.LookupCodeResult result = myCodeSystemDao.lookupCode(new StringType("10013-1"), new StringType(IHapiTerminologyLoaderSvc.LOINC_URI), null, mySrd);
@ -176,7 +173,6 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {
public void testLookupWithPropertiesExplicit() throws Exception {
ZipCollectionBuilder files = new ZipCollectionBuilder();
TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files);
TerminologyLoaderSvcLoincTest.addLoincOptionalFilesToZip(files);
myLoader.loadLoinc(files.getFiles(), mySrd);
IFhirResourceDaoCodeSystem.LookupCodeResult result = myCodeSystemDao.lookupCode(new StringType("10013-1"), new StringType(IHapiTerminologyLoaderSvc.LOINC_URI), null, mySrd);

View File

@ -13,6 +13,7 @@ import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
@ -26,9 +27,10 @@ import java.util.List;
import java.util.Map;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@ -92,7 +94,6 @@ public class TerminologyLoaderSvcLoincTest {
@Test
public void testLoadLoinc() throws Exception {
addLoincMandatoryFilesToZip(myFiles);
addLoincOptionalFilesToZip(myFiles);
// Actually do the load
mySvc.loadLoinc(myFiles.getFiles(), details);
@ -221,9 +222,9 @@ public class TerminologyLoaderSvcLoincTest {
assertEquals("NM", code.getCodingProperties(propertyName).get(0).getDisplay());
// RSNA Playbook - LOINC Part -> RadLex RID Mappings
conceptMap = conceptMaps.get(LoincRsnaPlaybookHandler.RID_MAPPING_CM_ID);
assertEquals(LoincRsnaPlaybookHandler.RID_MAPPING_CM_URI, conceptMap.getUrl());
assertEquals(LoincRsnaPlaybookHandler.RID_MAPPING_CM_NAME, conceptMap.getName());
conceptMap = conceptMaps.get(LoincRsnaPlaybookHandler.RADLEX_MAPPING_CM_ID);
assertEquals(LoincRsnaPlaybookHandler.RADLEX_MAPPING_CM_URI, conceptMap.getUrl());
assertEquals(LoincRsnaPlaybookHandler.RADLEX_MAPPING_CM_NAME, conceptMap.getName());
assertEquals(1, conceptMap.getGroup().size());
group = conceptMap.getGroupFirstRep();
// all entries have the same source and target so these should be null
@ -237,19 +238,19 @@ public class TerminologyLoaderSvcLoincTest {
assertEquals(Enumerations.ConceptMapEquivalence.EQUAL, group.getElement().get(0).getTarget().get(0).getEquivalence());
// RSNA Playbook - LOINC Term -> RadLex RPID Mappings
conceptMap = conceptMaps.get(LoincRsnaPlaybookHandler.RPID_MAPPING_CM_ID);
assertEquals(LoincRsnaPlaybookHandler.RPID_MAPPING_CM_URI, conceptMap.getUrl());
assertEquals(LoincRsnaPlaybookHandler.RPID_MAPPING_CM_NAME, conceptMap.getName());
conceptMap = conceptMaps.get(LoincRsnaPlaybookHandler.RADLEX_MAPPING_CM_ID);
assertEquals(LoincRsnaPlaybookHandler.RADLEX_MAPPING_CM_URI, conceptMap.getUrl());
assertEquals(LoincRsnaPlaybookHandler.RADLEX_MAPPING_CM_NAME, conceptMap.getName());
assertEquals(1, conceptMap.getGroup().size());
group = conceptMap.getGroupFirstRep();
// all entries have the same source and target so these should be null
assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, group.getSource());
assertEquals(LoincRsnaPlaybookHandler.RPID_CS_URI, group.getTarget());
assertEquals("24531-6", group.getElement().get(0).getCode());
assertEquals("US Retroperitoneum", group.getElement().get(0).getDisplay());
assertEquals("LP199995-4", group.getElement().get(0).getCode());
assertEquals("Neck", group.getElement().get(0).getDisplay());
assertEquals(1, group.getElement().get(0).getTarget().size());
assertEquals("RPID2142", group.getElement().get(0).getTarget().get(0).getCode());
assertEquals("US Retroperitoneum", group.getElement().get(0).getTarget().get(0).getDisplay());
assertEquals("RID7488", group.getElement().get(0).getTarget().get(0).getCode());
assertEquals("neck", group.getElement().get(0).getTarget().get(0).getDisplay());
assertEquals(Enumerations.ConceptMapEquivalence.EQUAL, group.getElement().get(0).getTarget().get(0).getEquivalence());
// TOP 2000 - US
@ -286,7 +287,7 @@ public class TerminologyLoaderSvcLoincTest {
// IEEE Medical Device Codes
conceptMap = conceptMaps.get(LoincIeeeMedicalDeviceCodeHandler.LOINC_IEEE_CM_ID);
ourLog.info(FhirContext.forR4().newXmlParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
ourLog.debug(FhirContext.forR4().newXmlParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
assertEquals(LoincIeeeMedicalDeviceCodeHandler.LOINC_IEEE_CM_NAME, conceptMap.getName());
assertEquals(LoincIeeeMedicalDeviceCodeHandler.LOINC_IEEE_CM_URI, conceptMap.getUrl());
assertEquals(1, conceptMap.getGroup().size());
@ -308,9 +309,29 @@ public class TerminologyLoaderSvcLoincTest {
assertEquals(9, vs.getCompose().getInclude().get(0).getConcept().size());
assertEquals("11525-3", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode());
assertEquals("US Pelvis Fetus for pregnancy", vs.getCompose().getInclude().get(0).getConcept().get(0).getDisplay());
// Group - Parent
vs = valueSets.get("LG100-4");
ourLog.info(FhirContext.forR4().newXmlParser().setPrettyPrint(true).encodeResourceToString(vs));
assertEquals("Chem_DrugTox_Chal_Sero_Allergy<SAME:Comp|Prop|Tm|Syst (except intravascular and urine)><ANYBldSerPlas,ANYUrineUrineSed><ROLLUP:Method>", vs.getName());
assertEquals("http://loinc.org/vs/LG100-4", vs.getUrl());
assertEquals(1, vs.getCompose().getInclude().size());
assertEquals(1, vs.getCompose().getInclude().get(0).getValueSet().size());
assertEquals("http://loinc.org/vs/LG1695-8", vs.getCompose().getInclude().get(0).getValueSet().get(0).getValueAsString());
// Group - Child
vs = valueSets.get("LG1695-8");
ourLog.info(FhirContext.forR4().newXmlParser().setPrettyPrint(true).encodeResourceToString(vs));
assertEquals("1,4-Dichlorobenzene|MCnc|Pt|ANYBldSerPl", vs.getName());
assertEquals("http://loinc.org/vs/LG1695-8", vs.getUrl());
assertEquals(1, vs.getCompose().getInclude().size());
assertEquals(2, vs.getCompose().getInclude().get(0).getConcept().size());
assertEquals("17424-3", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode());
assertEquals("13006-2", vs.getCompose().getInclude().get(0).getConcept().get(1).getCode());
}
@Test
@Ignore
public void testLoadLoincMandatoryFilesOnly() throws IOException {
addLoincMandatoryFilesToZip(myFiles);
@ -334,26 +355,29 @@ public class TerminologyLoaderSvcLoincTest {
@Test
public void testLoadLoincMissingMandatoryFiles() throws IOException {
addLoincOptionalFilesToZip(myFiles);
myFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UPLOAD_PROPERTIES_FILE);
myFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_FILE);
// Actually do the load
try {
mySvc.loadLoinc(myFiles.getFiles(), details);
fail();
} catch (UnprocessableEntityException e) {
assertEquals("Could not find the following mandatory files in input: [Loinc.csv, MultiAxialHierarchy.csv]", e.getMessage());
assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input:"));
assertThat(e.getMessage(), containsString("Loinc.csv"));
assertThat(e.getMessage(), containsString("MultiAxialHierarchy.csv"));
}
}
static void addLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles) throws IOException {
theFiles.addFileZip("/loinc/", "Loinc.csv", TerminologyLoaderSvcImpl.LOINC_FILE);
theFiles.addFileZip("/loinc/", "MultiAxialHierarchy.csv", TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE);
}
static void addLoincOptionalFilesToZip(ZipCollectionBuilder theFiles) throws IOException {
theFiles.addFileZip("/loinc/", "loincupload.properties");
theFiles.addFileZip("/loinc/", "AnswerList.csv", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE);
public static void addLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles) throws IOException {
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UPLOAD_PROPERTIES_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_TERMS_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PARENT_GROUP_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_FILE, TerminologyLoaderSvcImpl.LOINC_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE, TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_FILE, TerminologyLoaderSvcImpl.LOINC_PART_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE, TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE);
@ -363,13 +387,8 @@ public class TerminologyLoaderSvcLoincTest {
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_IMAGING_DOCUMENT_CODES_FILE);
/*
* Top 2000 files have versions in the filename so don't use the
* constant.. that way this is a better test
*/
theFiles.addFilePlain("/loinc/", "Top2000CommonLabResultsSi.csv");
theFiles.addFilePlain("/loinc/", "Top2000CommonLabResultsUs.csv");
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE);
theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE);
}
@AfterClass

View File

@ -0,0 +1,47 @@
package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.base.Charsets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.helger.commons.base64.Base64;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.TimeUnit;
public class HashFunctionTester {
byte[] bytes1 = "ABCDiohseoiey oisegoyi loegsiyosg l".getBytes(Charsets.UTF_8);
byte[] bytes2 = "sepyo pyoiyi fdfff".getBytes(Charsets.UTF_8);
byte[] bytes3 = "us".getBytes(Charsets.UTF_8);
byte[] bytes4 = "f;pspus sgrygliy gfdygfio fdgxylxgfdyfgxloygfxdofgixyl yxxfly3ar3r8a3988".getBytes(Charsets.UTF_8);
@Test
public void testHashBenchmark() {
test(Hashing.murmur3_128(), "murmur3_128 ");
test(Hashing.sha256(), "sha256 ");
test(Hashing.sha384(), "sha384 ");
test(Hashing.sha512(), "sha512 ");
}
public void test(HashFunction theHashFunction, String theName) {
int loops = 10000;
StopWatch sw = new StopWatch();
String output = "";
for (int i = 0; i < loops; i++) {
Hasher hasher = theHashFunction.newHasher();
hasher.putBytes(bytes1);
hasher.putBytes(bytes2);
hasher.putBytes(bytes3);
hasher.putBytes(bytes4);
output = Base64.encodeBytes(hasher.hash().asBytes());
}
ourLog.info("{} took {}ms for {} or {}/second to generate {} chars: {}", theName, sw.getMillis(), loops, sw.getThroughput(loops, TimeUnit.SECONDS), output.length(), output);
}
private static final Logger ourLog = LoggerFactory.getLogger(HashFunctionTester.class);
}
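The benchmark above informs the hash-based index columns introduced elsewhere in this commit. As a rough sketch only (the helper name and the idea of truncating the hash to a long are illustrative assumptions, not this commit's actual code), a murmur3_128 hash of the indexed components could be stored in a single indexed column like so:
static long hashValues(String theResourceType, String theParamName, String theValue) {
// murmur3_128 produces 16 bytes; asLong() keeps the first 8, which is plenty for an index column
Hasher hasher = Hashing.murmur3_128().newHasher();
hasher.putString(theResourceType, Charsets.UTF_8);
hasher.putString(theParamName, Charsets.UTF_8);
hasher.putString(theValue, Charsets.UTF_8);
return hasher.hash().asLong();
}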

View File

@ -0,0 +1,2 @@
"ParentGroupId","GroupId","Group","Archetype","Status","VersionFirstReleased"
"LG100-4","LG1695-8","1,4-Dichlorobenzene|MCnc|Pt|ANYBldSerPl","","Active",""

View File

@ -0,0 +1,3 @@
"Category","GroupId","Archetype","LoincNumber","LongCommonName"
"Flowsheet","LG1695-8","","17424-3","1,4-Dichlorobenzene [Mass/volume] in Blood"
"Flowsheet","LG1695-8","","13006-2","1,4-Dichlorobenzene [Mass/volume] in Serum or Plasma"

View File

@ -0,0 +1,2 @@
"ParentGroupId","ParentGroup","Status"
"LG100-4","Chem_DrugTox_Chal_Sero_Allergy<SAME:Comp|Prop|Tm|Syst (except intravascular and urine)><ANYBldSerPlas,ANYUrineUrineSed><ROLLUP:Method>","ACTIVE"

View File

@ -136,3 +136,28 @@ drop table cdr_audit_evt_target_user cascade constraints;
drop table cdr_xact_log_step cascade constraints;
drop table cdr_xact_log cascade constraints;
Upgrading
drop index IDX_SP_STRING;
create index IDX_SP_STRING_HASH_NRM;
create index IDX_SP_STRING_HASH_EXCT;
drop index IDX_SP_TOKEN;
drop index IDX_SP_TOKEN_UNQUAL;
create index IDX_SP_TOKEN_HASH;
create index IDX_SP_TOKEN_HASH_S;
create index IDX_SP_TOKEN_HASH_SV;
create index IDX_SP_TOKEN_HASH_V;
drop index IDX_SP_DATE;
create index IDX_SP_DATE_HASH;
drop index IDX_SP_QUANTITY;
create index IDX_SP_QUANTITY_HASH;
create index IDX_SP_QUANTITY_HASH_UN;
drop index IDX_FORCEDID_TYPE_FORCEDID;
create index IDX_FORCEDID_TYPE_FID;
drop index IDX_SP_NUMBER;
create index IDX_SP_NUMBER_HASH_VAL;
alter table HFJ_RES_PARAM_PRESENT drop column SP_ID;
drop index IDX_SEARCHPARM_RESTYPE_SPNAME;
drop index IDX_RESPARMPRESENT_SPID_RESID;
drop table HFJ_SEARCH_PARM;

View File

@ -146,7 +146,6 @@
<version>1.1</version>
</dependency>
<dependency>
<groupId>xpp3</groupId>
<artifactId>xpp3</artifactId>

View File

@ -19,7 +19,6 @@ import org.hl7.fhir.dstu3.terminologies.ValueSetExpanderFactory;
import org.hl7.fhir.dstu3.terminologies.ValueSetExpanderSimple;
import org.hl7.fhir.dstu3.utils.INarrativeGenerator;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.exceptions.TerminologyServiceException;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
import java.util.*;
@ -28,311 +27,310 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander, ValueSetExpanderFactory {
private final FhirContext myCtx;
private Map<String, Resource> myFetchedResourceCache = new HashMap<String, Resource>();
private IValidationSupport myValidationSupport;
private ExpansionProfile myExpansionProfile;
public HapiWorkerContext(FhirContext theCtx, IValidationSupport theValidationSupport) {
Validate.notNull(theCtx, "theCtx must not be null");
Validate.notNull(theValidationSupport, "theValidationSupport must not be null");
myCtx = theCtx;
myValidationSupport = theValidationSupport;
}
@Override
public List<StructureDefinition> allStructures() {
return myValidationSupport.fetchAllStructureDefinitions(myCtx);
}
@Override
@CoverageIgnore
public List<MetadataResource> allConformanceResources() {
throw new UnsupportedOperationException();
}
@Override
public CodeSystem fetchCodeSystem(String theSystem) {
if (myValidationSupport == null) {
return null;
} else {
return myValidationSupport.fetchCodeSystem(myCtx, theSystem);
}
}
@Override
public <T extends Resource> T fetchResource(Class<T> theClass, String theUri) {
if (myValidationSupport == null) {
return null;
} else {
@SuppressWarnings("unchecked")
T retVal = (T) myFetchedResourceCache.get(theUri);
if (retVal == null) {
retVal = myValidationSupport.fetchResource(myCtx, theClass, theUri);
if (retVal != null) {
myFetchedResourceCache.put(theUri, retVal);
}
}
return retVal;
}
}
@Override
public ValueSetExpansionOutcome expand(ValueSet theSource, ExpansionProfile theProfile) {
ValueSetExpansionOutcome vso;
try {
vso = getExpander().expand(theSource, theProfile);
} catch (InvalidRequestException e) {
throw e;
} catch (Exception e) {
throw new InternalErrorException(e);
}
if (vso.getError() != null) {
throw new InvalidRequestException(vso.getError());
} else {
return vso;
}
}
@Override
public List<ConceptMap> findMapsForSource(String theUrl) {
throw new UnsupportedOperationException();
}
@Override
public ValueSetExpansionOutcome expandVS(ValueSet theSource, boolean theCacheOk, boolean theHeiarchical) {
throw new UnsupportedOperationException();
}
@Override
public String getAbbreviation(String theName) {
throw new UnsupportedOperationException();
}
@Override
public ValueSetExpansionComponent expandVS(ConceptSetComponent theInc, boolean theHeiarchical) {
return myValidationSupport.expandValueSet(myCtx, theInc);
}
@Override
public ValueSetExpander getExpander() {
ValueSetExpanderSimple retVal = new ValueSetExpanderSimple(this, this);
retVal.setMaxExpansionSize(Integer.MAX_VALUE);
return retVal;
}
@Override
public INarrativeGenerator getNarrativeGenerator(String thePrefix, String theBasePath) {
throw new UnsupportedOperationException();
}
@Override
public IParser getParser(ParserType theType) {
throw new UnsupportedOperationException();
}
@Override
public IParser getParser(String theType) {
throw new UnsupportedOperationException();
}
@Override
public <T extends Resource> T fetchResourceWithException(Class<T> theClass_, String theUri) throws FHIRException {
T retVal = fetchResource(theClass_, theUri);
if (retVal == null) {
throw new FHIRException("Unable to fetch " + theUri);
}
return retVal;
}
@Override
public List<String> getResourceNames() {
List<String> result = new ArrayList<String>();
for (ResourceType next : ResourceType.values()) {
result.add(next.name());
}
Collections.sort(result);
return result;
}
@Override
public List<String> getTypeNames() {
throw new UnsupportedOperationException();
}
@Override
public String getVersion() {
return myCtx.getVersion().getVersion().getFhirVersionString();
}
@Override
@CoverageIgnore
public boolean hasCache() {
throw new UnsupportedOperationException();
}
@Override
public <T extends Resource> boolean hasResource(Class<T> theClass_, String theUri) {
throw new UnsupportedOperationException();
}
@Override
public boolean isNoTerminologyServer() {
return false;
}
@Override
public IParser newJsonParser() {
throw new UnsupportedOperationException();
}
@Override
public ExpansionProfile getExpansionProfile() {
return myExpansionProfile;
}
@Override
public void setExpansionProfile(ExpansionProfile theExpProfile) {
myExpansionProfile = theExpProfile;
}
@Override
public IParser newXmlParser() {
throw new UnsupportedOperationException();
}
@Override
public String oid2Uri(String theCode) {
throw new UnsupportedOperationException();
}
@Override
public void setLogger(ILoggingService theLogger) {
throw new UnsupportedOperationException();
}
@Override
public boolean supportsSystem(String theSystem) {
if (myValidationSupport == null) {
return false;
} else {
return myValidationSupport.isCodeSystemSupported(myCtx, theSystem);
}
}
@Override
public Set<String> typeTails() {
return new HashSet<String>(Arrays.asList("Integer", "UnsignedInt", "PositiveInt", "Decimal", "DateTime", "Date", "Time", "Instant", "String", "Uri", "Oid", "Uuid", "Id", "Boolean", "Code",
"Markdown", "Base64Binary", "Coding", "CodeableConcept", "Attachment", "Identifier", "Quantity", "SampledData", "Range", "Period", "Ratio", "HumanName", "Address", "ContactPoint",
"Timing", "Reference", "Annotation", "Signature", "Meta"));
}
@Override
public ValidationResult validateCode(CodeableConcept theCode, ValueSet theVs) {
for (Coding next : theCode.getCoding()) {
ValidationResult retVal = validateCode(next, theVs);
if (retVal != null && retVal.isOk()) {
return retVal;
}
}
return new ValidationResult(null, null);
}
@Override
public ValidationResult validateCode(Coding theCode, ValueSet theVs) {
String system = theCode.getSystem();
String code = theCode.getCode();
String display = theCode.getDisplay();
return validateCode(system, code, display, theVs);
}
@Override
public ValidationResult validateCode(String theSystem, String theCode, String theDisplay) {
IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode(myCtx, theSystem, theCode, theDisplay);
if (result == null) {
return null;
}
return new ValidationResult(result.getSeverity(), result.getMessage(), result.asConceptDefinition());
}
@Override
public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ConceptSetComponent theVsi) {
throw new UnsupportedOperationException();
}
@Override
public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ValueSet theVs) {
if (theVs != null && isNotBlank(theCode)) {
for (ConceptSetComponent next : theVs.getCompose().getInclude()) {
if (isBlank(theSystem) || theSystem.equals(next.getSystem())) {
for (ConceptReferenceComponent nextCode : next.getConcept()) {
if (theCode.equals(nextCode.getCode())) {
CodeType code = new CodeType(theCode);
return new ValidationResult(new ConceptDefinitionComponent(code));
}
}
}
}
}
boolean caseSensitive = true;
if (isNotBlank(theSystem)) {
CodeSystem system = fetchCodeSystem(theSystem);
if (system == null) {
return new ValidationResult(IssueSeverity.INFORMATION, "Code " + theSystem + "/" + theCode + " was not validated because the code system is not present");
}
if (system.hasCaseSensitive()) {
caseSensitive = system.getCaseSensitive();
}
}
String wantCode = theCode;
if (!caseSensitive) {
wantCode = wantCode.toUpperCase();
}
ValueSetExpansionOutcome expandedValueSet = null;
/*
* The following valueset is a special case, since the BCP codesystem is very difficult to expand
*/
if (theVs != null && "http://hl7.org/fhir/ValueSet/languages".equals(theVs.getId())) {
ValueSet expansion = new ValueSet();
for (ConceptSetComponent nextInclude : theVs.getCompose().getInclude()) {
for (ConceptReferenceComponent nextConcept : nextInclude.getConcept()) {
expansion.getExpansion().addContains().setCode(nextConcept.getCode()).setDisplay(nextConcept.getDisplay());
}
}
expandedValueSet = new ValueSetExpansionOutcome(expansion);
}
if (expandedValueSet == null) {
expandedValueSet = expand(theVs, null);
}
for (ValueSetExpansionContainsComponent next : expandedValueSet.getValueset().getExpansion().getContains()) {
String nextCode = next.getCode();
if (!caseSensitive) {
nextCode = nextCode.toUpperCase();
}
if (nextCode.equals(wantCode)) {
if (theSystem == null || next.getSystem().equals(theSystem)) {
ConceptDefinitionComponent definition = new ConceptDefinitionComponent();
definition.setCode(next.getCode());
definition.setDisplay(next.getDisplay());
ValidationResult retVal = new ValidationResult(definition);
return retVal;
}
}
}
return new ValidationResult(IssueSeverity.ERROR, "Unknown code[" + theCode + "] in system[" + theSystem + "]");
}
}
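For orientation, a minimal usage sketch of the class above (the LOINC code is illustrative, and the dstu3 DefaultProfileValidationSupport shown elsewhere in this commit is assumed to be on the classpath):
FhirContext ctx = FhirContext.forDstu3();
HapiWorkerContext workerCtx = new HapiWorkerContext(ctx, new DefaultProfileValidationSupport());
// Delegates to the wrapped IValidationSupport; returns null if the support has no opinion on the code
ValidationResult outcome = workerCtx.validateCode("http://loinc.org", "10013-1", null);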

View File

@ -0,0 +1,67 @@
package org.hl7.fhir.dstu3.hapi.validation;
import ca.uhn.fhir.context.FhirContext;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.model.CodeSystem;
import org.hl7.fhir.dstu3.model.StructureDefinition;
import org.hl7.fhir.dstu3.model.ValueSet;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.List;
import java.util.concurrent.TimeUnit;
@SuppressWarnings("unchecked")
public class CachingValidationSupport implements IValidationSupport {
private final IValidationSupport myWrap;
private final Cache<String, Object> myCache;
public CachingValidationSupport(IValidationSupport theWrap) {
myWrap = theWrap;
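// Entries expire 60 seconds after write; only the fetchAll* methods below consult this cache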
myCache = Caffeine.newBuilder().expireAfterWrite(60, TimeUnit.SECONDS).build();
}
@Override
public ValueSet.ValueSetExpansionComponent expandValueSet(FhirContext theContext, ValueSet.ConceptSetComponent theInclude) {
return myWrap.expandValueSet(theContext, theInclude);
}
@Override
public List<IBaseResource> fetchAllConformanceResources(FhirContext theContext) {
return (List<IBaseResource>) myCache.get("fetchAllConformanceResources",
t -> myWrap.fetchAllConformanceResources(theContext));
}
@Override
public List<StructureDefinition> fetchAllStructureDefinitions(FhirContext theContext) {
return (List<StructureDefinition>) myCache.get("fetchAllStructureDefinitions",
t -> myWrap.fetchAllStructureDefinitions(theContext));
}
@Override
public CodeSystem fetchCodeSystem(FhirContext theContext, String theSystem) {
return myWrap.fetchCodeSystem(theContext, theSystem);
}
@Override
public <T extends IBaseResource> T fetchResource(FhirContext theContext, Class<T> theClass, String theUri) {
return myWrap.fetchResource(theContext, theClass, theUri);
}
@Override
public StructureDefinition fetchStructureDefinition(FhirContext theCtx, String theUrl) {
return myWrap.fetchStructureDefinition(theCtx, theUrl);
}
@Override
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return myWrap.isCodeSystemSupported(theContext, theSystem);
}
@Override
public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
return myWrap.validateCode(theContext, theCodeSystem, theCode, theDisplay);
}
}
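A minimal wiring sketch for the class above; myJpaValidationSupport stands in for whatever IValidationSupport implementation the server already uses:
IValidationSupport support = new CachingValidationSupport(
new ValidationSupportChain(new DefaultProfileValidationSupport(), myJpaValidationSupport));
FhirInstanceValidator validator = new FhirInstanceValidator(support);
Note that only the expensive fetchAllConformanceResources and fetchAllStructureDefinitions calls are routed through the cache; single-resource lookups pass straight through to the wrapped implementation.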

View File

@ -0,0 +1,56 @@
package org.hl7.fhir.instance.hapi.validation;
import ca.uhn.fhir.context.FhirContext;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.hl7.fhir.instance.model.StructureDefinition;
import org.hl7.fhir.instance.model.ValueSet;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.List;
import java.util.concurrent.TimeUnit;
@SuppressWarnings("unchecked")
public class CachingValidationSupport implements IValidationSupport {
private final IValidationSupport myWrap;
private final Cache<String, Object> myCache;
public CachingValidationSupport(IValidationSupport theWrap) {
myWrap = theWrap;
myCache = Caffeine.newBuilder().expireAfterWrite(60, TimeUnit.SECONDS).build();
}
@Override
public List<StructureDefinition> allStructures() {
return (List<StructureDefinition>) myCache.get("fetchAllStructureDefinitions",
t -> myWrap.allStructures());
}
@Override
public ValueSet.ValueSetExpansionComponent expandValueSet(FhirContext theContext, ValueSet.ConceptSetComponent theInclude) {
return myWrap.expandValueSet(theContext, theInclude);
}
@Override
public ValueSet fetchCodeSystem(FhirContext theContext, String theSystem) {
return myWrap.fetchCodeSystem(theContext, theSystem);
}
@Override
public <T extends IBaseResource> T fetchResource(FhirContext theContext, Class<T> theClass, String theUri) {
return myWrap.fetchResource(theContext, theClass, theUri);
}
@Override
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return myWrap.isCodeSystemSupported(theContext, theSystem);
}
@Override
public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
return myWrap.validateCode(theContext, theCodeSystem, theCode, theDisplay);
}
}

View File

@ -0,0 +1,67 @@
package org.hl7.fhir.r4.hapi.validation;
import ca.uhn.fhir.context.FhirContext;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.List;
import java.util.concurrent.TimeUnit;
@SuppressWarnings("unchecked")
public class CachingValidationSupport implements IValidationSupport {
private final IValidationSupport myWrap;
private final Cache<String, Object> myCache;
public CachingValidationSupport(IValidationSupport theWrap) {
myWrap = theWrap;
myCache = Caffeine.newBuilder().expireAfterWrite(60, TimeUnit.SECONDS).build();
}
@Override
public ValueSet.ValueSetExpansionComponent expandValueSet(FhirContext theContext, ValueSet.ConceptSetComponent theInclude) {
return myWrap.expandValueSet(theContext, theInclude);
}
@Override
public List<IBaseResource> fetchAllConformanceResources(FhirContext theContext) {
return (List<IBaseResource>) myCache.get("fetchAllConformanceResources",
t -> myWrap.fetchAllConformanceResources(theContext));
}
@Override
public List<StructureDefinition> fetchAllStructureDefinitions(FhirContext theContext) {
return (List<StructureDefinition>) myCache.get("fetchAllStructureDefinitions",
t -> myWrap.fetchAllStructureDefinitions(theContext));
}
@Override
public CodeSystem fetchCodeSystem(FhirContext theContext, String theSystem) {
return myWrap.fetchCodeSystem(theContext, theSystem);
}
@Override
public <T extends IBaseResource> T fetchResource(FhirContext theContext, Class<T> theClass, String theUri) {
return myWrap.fetchResource(theContext, theClass, theUri);
}
@Override
public StructureDefinition fetchStructureDefinition(FhirContext theCtx, String theUrl) {
return myWrap.fetchStructureDefinition(theCtx, theUrl);
}
@Override
public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
return myWrap.isCodeSystemSupported(theContext, theSystem);
}
@Override
public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
return myWrap.validateCode(theContext, theCodeSystem, theCode, theDisplay);
}
}

View File

@ -1,8 +1,9 @@
package org.hl7.fhir.r4.hapi.ctx;
package org.hl7.fhir.r4.hapi.validation;
import ca.uhn.fhir.context.FhirContext;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.MetadataResource;
import org.hl7.fhir.r4.model.StructureDefinition;

View File

@ -1,7 +1,8 @@
package org.hl7.fhir.r4.hapi.ctx;
package org.hl7.fhir.r4.hapi.validation;
import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent;

View File

@ -77,7 +77,7 @@ public class FhirInstanceValidatorDstu3Test {
myVal.setValidateAgainstStandardSchematron(false);
myMockSupport = mock(IValidationSupport.class);
ValidationSupportChain validationSupport = new ValidationSupportChain(myMockSupport, myDefaultValidationSupport);
CachingValidationSupport validationSupport = new CachingValidationSupport(new ValidationSupportChain(myMockSupport, myDefaultValidationSupport));
myInstanceVal = new FhirInstanceValidator(validationSupport);
myVal.registerValidatorModule(myInstanceVal);

View File

@ -17,7 +17,10 @@ import org.hl7.fhir.r4.conformance.ProfileUtilities;
import org.hl7.fhir.r4.context.IWorkerContext;
import org.hl7.fhir.r4.hapi.ctx.*;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport.CodeValidationResult;
import org.hl7.fhir.r4.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.hapi.validation.PrePopulatedValidationSupport;
import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
@ -94,7 +97,7 @@ public class FhirInstanceValidatorR4Test {
myVal.setValidateAgainstStandardSchematron(false);
myMockSupport = mock(IValidationSupport.class);
ValidationSupportChain validationSupport = new ValidationSupportChain(myMockSupport, myDefaultValidationSupport);
CachingValidationSupport validationSupport = new CachingValidationSupport(new ValidationSupportChain(myMockSupport, myDefaultValidationSupport));
myInstanceVal = new FhirInstanceValidator(validationSupport);
myVal.registerValidatorModule(myInstanceVal);
@ -539,7 +542,7 @@ public class FhirInstanceValidatorR4Test {
public void testValidateProfileWithExtension() throws IOException, FHIRException {
PrePopulatedValidationSupport valSupport = new PrePopulatedValidationSupport();
DefaultProfileValidationSupport defaultSupport = new DefaultProfileValidationSupport();
ValidationSupportChain support = new ValidationSupportChain(valSupport, defaultSupport);
CachingValidationSupport support = new CachingValidationSupport(new ValidationSupportChain(valSupport, defaultSupport));
// Prepopulate SDs
valSupport.addStructureDefinition(loadStructureDefinition(defaultSupport, "/dstu3/myconsent-profile.xml"));

View File

@ -7,13 +7,12 @@ import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import ca.uhn.fhir.validation.SingleValidationMessage;
import ca.uhn.fhir.validation.ValidationResult;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.r4.context.IWorkerContext;
import org.hl7.fhir.r4.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport.CodeValidationResult;
import org.hl7.fhir.r4.hapi.ctx.ValidationSupportChain;
import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.CodeSystem.CodeSystemContentMode;
@ -23,7 +22,6 @@ import org.hl7.fhir.r4.model.Questionnaire.QuestionnaireItemOptionComponent;
import org.hl7.fhir.r4.model.Questionnaire.QuestionnaireItemType;
import org.hl7.fhir.r4.model.QuestionnaireResponse.QuestionnaireResponseItemComponent;
import org.hl7.fhir.r4.model.QuestionnaireResponse.QuestionnaireResponseStatus;
import org.hl7.fhir.r4.model.IdType;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;

View File

@ -89,6 +89,25 @@
client-assigned reosurce ID). Thanks to Frank Tao for the pull
request!
</action>
<action type="add">
A new IValidationSupport implementation has been added, named CachingValidationSupport. This
module wraps another implementation and provides short-term caching. This can yield a dramatic
performance improvement on servers that validate or execute FHIRPath repeatedly
under load. This module is used by default in the JPA server.
</action>
<action type="fix">
An index in the JPA server on the HFJ_FORCED_ID table was incorrectly
not marked as unique. This meant that under heavy load it was possible to
create two resources with the same client-assigned ID.
</action>
<action type="fix">
The JPA server
<![CDATA[<code>$expunge</code>]]>
operation deleted components of an individual resource record in
separate database transactions, meaning that if an operation failed
unexpectedly, resources could be left in an inconsistent state. This has been
corrected.
</action>
<action type="fix" issue="1015">
A bug was fixed in the JPA terminology uploader, where it was possible
in some cases for some ValueSets and ConceptMaps to not be saved because
@ -111,6 +130,10 @@
calls required when reading resources back. This causes an improvement in performance.
Thanks to Frank Tao for the pull request!
</action>
<action type="fix">
A crash was fixed when deleting a ConceptMap resource in the
JPA server. This crash was a regression in HAPI FHIR 3.4.0.
</action>
</release>
<release version="3.4.0" date="2018-05-28">
<action type="add">