Add support for of-type token modifier (#3309)
* Add support for of-type token modifier * Add changelog * Clean up hash calculation logic * Test fix * Rework hash generation * Work on hash generation * Test fix * Avoid redundant hash calculations * Revert some useless changes * Cleanup * Test fixes
This commit is contained in:
parent
48caff30d9
commit
3fa76a9027
|
@ -55,7 +55,7 @@ public class ParserOptions {
|
|||
* serialized.
|
||||
* </p>
|
||||
*
|
||||
* @since 6.0.0
|
||||
* @since 5.7.0
|
||||
*/
|
||||
public boolean isAutoContainReferenceTargetsWithNoId() {
|
||||
return myAutoContainReferenceTargetsWithNoId;
|
||||
|
@ -72,7 +72,7 @@ public class ParserOptions {
|
|||
* serialized.
|
||||
* </p>
|
||||
*
|
||||
* @since 6.0.0
|
||||
* @since 5.7.0
|
||||
*/
|
||||
public void setAutoContainReferenceTargetsWithNoId(boolean theAllowAutoContainedReferences) {
|
||||
myAutoContainReferenceTargetsWithNoId = theAllowAutoContainedReferences;
|
||||
|
|
|
@ -217,6 +217,7 @@ public class Constants {
|
|||
public static final String PARAMQUALIFIER_STRING_EXACT = ":exact";
|
||||
public static final String PARAMQUALIFIER_TOKEN_TEXT = ":text";
|
||||
public static final String PARAMQUALIFIER_MDM = ":mdm";
|
||||
public static final String PARAMQUALIFIER_TOKEN_OF_TYPE = ":of-type";
|
||||
public static final int STATUS_HTTP_200_OK = 200;
|
||||
public static final int STATUS_HTTP_201_CREATED = 201;
|
||||
public static final int STATUS_HTTP_204_NO_CONTENT = 204;
|
||||
|
|
|
@ -62,7 +62,7 @@ public enum TokenParamModifier {
|
|||
/**
|
||||
* :of-type
|
||||
*/
|
||||
OF_TYPE(":of-type");
|
||||
OF_TYPE(Constants.PARAMQUALIFIER_TOKEN_OF_TYPE);
|
||||
|
||||
private static final Map<String, TokenParamModifier> VALUE_TO_ENUM;
|
||||
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
type: add
|
||||
issue: 1892
|
||||
title: "In the JPA server, the token `:of-type` modifier is now supported. This is an optional feature and
|
||||
must be explicitly enabled (default is disabled)."
|
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
type: perf
|
||||
issue: 1892
|
||||
title: "A redundant set of hash calculations in the JPA server was eliminated."
|
|
@ -0,0 +1,7 @@
|
|||
---
|
||||
type: remove
|
||||
issue: 1892
|
||||
title: "In the JPA server, searching using the token `:text` modifier will no longer index and match
|
||||
values in `Identifier.type.text`. Previously we indexed this element, but the FHIR specification does not
|
||||
actually show this field as being indexed for this modifier, and intuitively it doesn't make sense to
|
||||
do so."
|
|
@ -11,6 +11,7 @@ import ca.uhn.fhir.context.RuntimeSearchParam;
|
|||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IDao;
|
||||
|
@ -134,7 +135,6 @@ import javax.persistence.criteria.CriteriaQuery;
|
|||
import javax.persistence.criteria.Root;
|
||||
import javax.xml.stream.events.Characters;
|
||||
import javax.xml.stream.events.XMLEvent;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
@ -1229,7 +1229,16 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
if (thePerformIndexing || ((ResourceTable) theEntity).getVersion() == 1) {
|
||||
|
||||
newParams = new ResourceIndexedSearchParams();
|
||||
mySearchParamWithInlineReferencesExtractor.populateFromResource(newParams, theTransactionDetails, entity, theResource, existingParams, theRequest, thePerformIndexing);
|
||||
|
||||
RequestPartitionId requestPartitionId;
|
||||
if (!myPartitionSettings.isPartitioningEnabled()) {
|
||||
requestPartitionId = RequestPartitionId.allPartitions();
|
||||
} else if (entity.getPartitionId() != null) {
|
||||
requestPartitionId = entity.getPartitionId().toPartitionId();
|
||||
} else {
|
||||
requestPartitionId = RequestPartitionId.defaultPartition();
|
||||
}
|
||||
mySearchParamWithInlineReferencesExtractor.populateFromResource(requestPartitionId, newParams, theTransactionDetails, entity, theResource, existingParams, theRequest, thePerformIndexing);
|
||||
|
||||
changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true);
|
||||
|
||||
|
@ -1656,20 +1665,23 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
myConfig = theDaoConfig;
|
||||
}
|
||||
|
||||
private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
|
||||
|
||||
private final TagDefinition myTagDefinition;
|
||||
private final MemoryCacheService.TagDefinitionCacheKey myKey;
|
||||
|
||||
public AddTagDefinitionToCacheAfterCommitSynchronization(MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) {
|
||||
myTagDefinition = theTagDefinition;
|
||||
myKey = theKey;
|
||||
public void populateFullTextFields(final FhirContext theContext, final IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams) {
|
||||
if (theEntity.getDeleted() != null) {
|
||||
theEntity.setNarrativeText(null);
|
||||
theEntity.setContentText(null);
|
||||
} else {
|
||||
theEntity.setNarrativeText(parseNarrativeTextIntoWords(theResource));
|
||||
theEntity.setContentText(parseContentTextIntoWords(theContext, theResource));
|
||||
if (myDaoConfig.isAdvancedLuceneIndexing()) {
|
||||
ExtendedLuceneIndexData luceneIndexData = myFulltextSearchSvc.extractLuceneIndexData(theResource, theNewParams);
|
||||
theEntity.setLuceneIndexData(luceneIndexData);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterCommit() {
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, myKey, myTagDefinition);
|
||||
}
|
||||
@VisibleForTesting
|
||||
public void setPartitionSettingsForUnitTest(PartitionSettings thePartitionSettings) {
|
||||
myPartitionSettings = thePartitionSettings;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
|
@ -1706,20 +1718,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
return retVal.toString();
|
||||
}
|
||||
|
||||
public void populateFullTextFields(final FhirContext theContext, final IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams) {
|
||||
if (theEntity.getDeleted() != null) {
|
||||
theEntity.setNarrativeText(null);
|
||||
theEntity.setContentText(null);
|
||||
} else {
|
||||
theEntity.setNarrativeText(parseNarrativeTextIntoWords(theResource));
|
||||
theEntity.setContentText(parseContentTextIntoWords(theContext, theResource));
|
||||
if (myDaoConfig.isAdvancedLuceneIndexing()) {
|
||||
ExtendedLuceneIndexData luceneIndexData = myFulltextSearchSvc.extractLuceneIndexData(theResource, theNewParams);
|
||||
theEntity.setLuceneIndexData(luceneIndexData);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static String decodeResource(byte[] theResourceBytes, ResourceEncodingEnum theResourceEncoding) {
|
||||
String resourceText = null;
|
||||
switch (theResourceEncoding) {
|
||||
|
@ -1804,4 +1802,20 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
}
|
||||
}
|
||||
|
||||
private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
|
||||
|
||||
private final TagDefinition myTagDefinition;
|
||||
private final MemoryCacheService.TagDefinitionCacheKey myKey;
|
||||
|
||||
public AddTagDefinitionToCacheAfterCommitSynchronization(MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) {
|
||||
myTagDefinition = theTagDefinition;
|
||||
myKey = theKey;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterCommit() {
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, myKey, myTagDefinition);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -121,7 +121,6 @@ public class DaoSearchParamSynchronizer {
|
|||
// Take a row we were going to remove, and repurpose its ID
|
||||
T entityToReuse = theIndexesToRemove.remove(theIndexesToRemove.size() - 1);
|
||||
entityToReuse.copyMutableValuesFrom(targetEntity);
|
||||
entityToReuse.calculateHashes();
|
||||
theIndexesToAdd.set(addIndex, entityToReuse);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -131,6 +131,7 @@ public class IdHelperService {
|
|||
@Nonnull
|
||||
public IResourceLookup resolveResourceIdentity(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId) throws ResourceNotFoundException {
|
||||
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
|
||||
assert theRequestPartitionId != null;
|
||||
|
||||
IdDt id = new IdDt(theResourceType, theResourceId);
|
||||
Map<String, List<IResourceLookup>> matches = translateForcedIdToPids(theRequestPartitionId,
|
||||
|
@ -377,6 +378,8 @@ public class IdHelperService {
|
|||
}
|
||||
|
||||
private Map<String, List<IResourceLookup>> translateForcedIdToPids(@Nonnull RequestPartitionId theRequestPartitionId, Collection<IIdType> theId) {
|
||||
assert theRequestPartitionId != null;
|
||||
|
||||
theId.forEach(id -> Validate.isTrue(id.hasIdPart()));
|
||||
|
||||
if (theId.isEmpty()) {
|
||||
|
|
|
@ -31,7 +31,6 @@ import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
|
|||
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
|
||||
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
|
||||
|
@ -56,12 +55,12 @@ import com.google.common.annotations.VisibleForTesting;
|
|||
import org.hl7.fhir.instance.model.api.IBaseReference;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import javax.annotation.Nullable;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Lazy;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
import javax.persistence.PersistenceContextType;
|
||||
|
@ -104,6 +103,8 @@ public class SearchParamWithInlineReferencesExtractor {
|
|||
private IResourceIndexedComboStringUniqueDao myResourceIndexedCompositeStringUniqueDao;
|
||||
@Autowired
|
||||
private PartitionSettings myPartitionSettings;
|
||||
@Autowired
|
||||
private MemoryCacheService myMemoryCacheService;
|
||||
|
||||
@VisibleForTesting
|
||||
public void setPartitionSettings(PartitionSettings thePartitionSettings) {
|
||||
|
@ -120,17 +121,10 @@ public class SearchParamWithInlineReferencesExtractor {
|
|||
mySearchParamRegistry = theSearchParamRegistry;
|
||||
}
|
||||
|
||||
public void populateFromResource(ResourceIndexedSearchParams theParams, TransactionDetails theTransactionDetails, ResourceTable theEntity, IBaseResource theResource, ResourceIndexedSearchParams theExistingParams, RequestDetails theRequest, boolean theFailOnInvalidReference) {
|
||||
public void populateFromResource(RequestPartitionId theRequestPartitionId, ResourceIndexedSearchParams theParams, TransactionDetails theTransactionDetails, ResourceTable theEntity, IBaseResource theResource, ResourceIndexedSearchParams theExistingParams, RequestDetails theRequest, boolean theFailOnInvalidReference) {
|
||||
extractInlineReferences(theResource, theTransactionDetails, theRequest);
|
||||
|
||||
RequestPartitionId partitionId;
|
||||
if (myPartitionSettings.isPartitioningEnabled()) {
|
||||
partitionId = PartitionablePartitionId.toRequestPartitionId(theEntity.getPartitionId());
|
||||
} else {
|
||||
partitionId = RequestPartitionId.allPartitions();
|
||||
}
|
||||
|
||||
mySearchParamExtractorService.extractFromResource(partitionId, theRequest, theParams, theEntity, theResource, theTransactionDetails, theFailOnInvalidReference);
|
||||
mySearchParamExtractorService.extractFromResource(theRequestPartitionId, theRequest, theParams, theEntity, theResource, theTransactionDetails, theFailOnInvalidReference);
|
||||
|
||||
Set<Map.Entry<String, RuntimeSearchParam>> activeSearchParams = mySearchParamRegistry.getActiveSearchParams(theEntity.getResourceType()).entrySet();
|
||||
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.ENABLED) {
|
||||
|
@ -183,8 +177,8 @@ public class SearchParamWithInlineReferencesExtractor {
|
|||
Set<String> queryStringsToPopulate = extractParameterCombinationsForComboParam(theParams, theResourceType, theParam);
|
||||
|
||||
for (String nextQueryString : queryStringsToPopulate) {
|
||||
ourLog.trace("Adding composite unique SP: {}", nextQueryString);
|
||||
theParams.myComboStringUniques.add(new ResourceIndexedComboStringUnique(theEntity, nextQueryString, theParam.getId()));
|
||||
ourLog.trace("Adding composite unique SP: {}", nextQueryString);
|
||||
theParams.myComboStringUniques.add(new ResourceIndexedComboStringUnique(theEntity, nextQueryString, theParam.getId()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -299,13 +293,12 @@ public class SearchParamWithInlineReferencesExtractor {
|
|||
if (paramsListForCompositePart != null) {
|
||||
paramsListForCompositePart = paramsListForCompositePart
|
||||
.stream()
|
||||
.filter(t->t.getParamName().equals(nextCompositeOf.getName()))
|
||||
.filter(t -> t.getParamName().equals(nextCompositeOf.getName()))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
return paramsListForCompositePart;
|
||||
}
|
||||
|
||||
|
||||
@VisibleForTesting
|
||||
public void setDaoConfig(DaoConfig theDaoConfig) {
|
||||
myDaoConfig = theDaoConfig;
|
||||
|
@ -316,12 +309,6 @@ public class SearchParamWithInlineReferencesExtractor {
|
|||
myContext = theContext;
|
||||
}
|
||||
|
||||
|
||||
@Autowired
|
||||
private MemoryCacheService myMemoryCacheService;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Handle references within the resource that are match URLs, for example references like "Patient?identifier=foo". These match URLs are resolved and replaced with the ID of the
|
||||
* matching resource.
|
||||
|
|
|
@ -61,15 +61,14 @@ import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LO
|
|||
public class FhirResourceDaoCodeSystemR4 extends BaseHapiFhirResourceDao<CodeSystem> implements IFhirResourceDaoCodeSystem<CodeSystem, Coding, CodeableConcept> {
|
||||
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoCodeSystemR4.class);
|
||||
|
||||
@Autowired
|
||||
private IValidationSupport myValidationSupport;
|
||||
@Autowired
|
||||
protected ITermCodeSystemStorageSvc myTerminologyCodeSystemStorageSvc;
|
||||
@Autowired
|
||||
private FhirContext myFhirContext;
|
||||
@Autowired
|
||||
protected ITermDeferredStorageSvc myTermDeferredStorageSvc;
|
||||
@Autowired
|
||||
private IValidationSupport myValidationSupport;
|
||||
@Autowired
|
||||
private FhirContext myFhirContext;
|
||||
|
||||
@Override
|
||||
public List<IIdType> findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem, RequestDetails theRequest) {
|
||||
|
@ -170,8 +169,8 @@ public class FhirResourceDaoCodeSystemR4 extends BaseHapiFhirResourceDao<CodeSys
|
|||
|
||||
@Override
|
||||
public CodeValidationResult validateCode(IIdType theCodeSystemId, IPrimitiveType<String> theCodeSystemUrl,
|
||||
IPrimitiveType<String> theVersion, IPrimitiveType<String> theCode, IPrimitiveType<String> theDisplay,
|
||||
Coding theCoding, CodeableConcept theCodeableConcept, RequestDetails theRequestDetails) {
|
||||
IPrimitiveType<String> theVersion, IPrimitiveType<String> theCode, IPrimitiveType<String> theDisplay,
|
||||
Coding theCoding, CodeableConcept theCodeableConcept, RequestDetails theRequestDetails) {
|
||||
|
||||
return myTerminologySvc.codeSystemValidateCode(theCodeSystemId, toStringOrNull(theCodeSystemUrl), toStringOrNull(theVersion), toStringOrNull(theCode), toStringOrNull(theDisplay), theCoding, theCodeableConcept);
|
||||
|
||||
|
|
|
@ -20,9 +20,9 @@ package ca.uhn.fhir.jpa.dao.r4;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
|
|
|
@ -22,20 +22,20 @@ package ca.uhn.fhir.jpa.dao.r5;
|
|||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.context.support.ValidationSupportContext;
|
||||
import ca.uhn.fhir.context.support.IValidationSupport.CodeValidationResult;
|
||||
import ca.uhn.fhir.context.support.ValidationSupportContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
|
||||
import ca.uhn.fhir.jpa.util.LogicUtil;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import org.hl7.fhir.convertors.advisors.impl.BaseAdvisor_40_50;
|
||||
|
@ -65,11 +65,11 @@ public class FhirResourceDaoCodeSystemR5 extends BaseHapiFhirResourceDao<CodeSys
|
|||
@Autowired
|
||||
protected IdHelperService myIdHelperService;
|
||||
@Autowired
|
||||
protected ITermDeferredStorageSvc myTermDeferredStorageSvc;
|
||||
@Autowired
|
||||
private IValidationSupport myValidationSupport;
|
||||
@Autowired
|
||||
private FhirContext myFhirContext;
|
||||
@Autowired
|
||||
protected ITermDeferredStorageSvc myTermDeferredStorageSvc;
|
||||
|
||||
@Override
|
||||
public List<IIdType> findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem, RequestDetails theRequest) {
|
||||
|
@ -170,8 +170,8 @@ public class FhirResourceDaoCodeSystemR5 extends BaseHapiFhirResourceDao<CodeSys
|
|||
|
||||
@Override
|
||||
public CodeValidationResult validateCode(IIdType theCodeSystemId, IPrimitiveType<String> theCodeSystemUrl,
|
||||
IPrimitiveType<String> theVersion, IPrimitiveType<String> theCode, IPrimitiveType<String> theDisplay,
|
||||
Coding theCoding, CodeableConcept theCodeableConcept, RequestDetails theRequestDetails) {
|
||||
IPrimitiveType<String> theVersion, IPrimitiveType<String> theCode, IPrimitiveType<String> theDisplay,
|
||||
Coding theCoding, CodeableConcept theCodeableConcept, RequestDetails theRequestDetails) {
|
||||
|
||||
return myTerminologySvc.codeSystemValidateCode(theCodeSystemId, toStringOrNull(theCodeSystemUrl), toStringOrNull(theVersion), toStringOrNull(theCode), toStringOrNull(theDisplay), theCoding, theCodeableConcept);
|
||||
}
|
||||
|
|
|
@ -20,9 +20,9 @@ package ca.uhn.fhir.jpa.dao.r5;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
|
|
|
@ -28,6 +28,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
|||
import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
|
||||
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
|
||||
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
|
||||
import ca.uhn.fhir.jpa.search.builder.QueryStack;
|
||||
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
|
||||
|
@ -35,16 +36,19 @@ import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
|||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
|
||||
import ca.uhn.fhir.model.base.composite.BaseIdentifierDt;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.param.NumberParam;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.rest.param.TokenParamModifier;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
|
||||
import ca.uhn.fhir.util.FhirVersionIndependentConcept;
|
||||
import com.google.common.collect.Sets;
|
||||
import com.healthmarketscience.sqlbuilder.BinaryCondition;
|
||||
import com.healthmarketscience.sqlbuilder.Condition;
|
||||
import com.healthmarketscience.sqlbuilder.InCondition;
|
||||
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
|
@ -73,7 +77,8 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder {
|
|||
|
||||
@Autowired
|
||||
private ITermReadSvc myTerminologySvc;
|
||||
|
||||
@Autowired
|
||||
private ModelConfig myModelConfig;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
|
@ -129,7 +134,7 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder {
|
|||
if (nextParameter instanceof TokenParam) {
|
||||
TokenParam id = (TokenParam) nextParameter;
|
||||
system = id.getSystem();
|
||||
code = (id.getValue());
|
||||
code = id.getValue();
|
||||
modifier = id.getModifier();
|
||||
} else if (nextParameter instanceof BaseIdentifierDt) {
|
||||
BaseIdentifierDt id = (BaseIdentifierDt) nextParameter;
|
||||
|
@ -171,6 +176,20 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder {
|
|||
system = determineSystemIfMissing(theSearchParam, code, system);
|
||||
validateHaveSystemAndCodeForToken(paramName, code, system);
|
||||
codes.addAll(myTerminologySvc.findCodesBelow(system, code));
|
||||
} else if (modifier == TokenParamModifier.OF_TYPE) {
|
||||
if (!myModelConfig.isIndexIdentifierOfType()) {
|
||||
throw new MethodNotAllowedException("The :of-type modifier is not enabled on this server");
|
||||
}
|
||||
if (isBlank(system) || isBlank(code)) {
|
||||
throw new InvalidRequestException("Invalid parameter value for :of-type query");
|
||||
}
|
||||
int pipeIdx = code.indexOf('|');
|
||||
if (pipeIdx < 1 || pipeIdx == code.length() - 1) {
|
||||
throw new InvalidRequestException("Invalid parameter value for :of-type query");
|
||||
}
|
||||
|
||||
paramName = paramName + Constants.PARAMQUALIFIER_TOKEN_OF_TYPE;
|
||||
codes.add(new FhirVersionIndependentConcept(system, code));
|
||||
} else {
|
||||
if (modifier == TokenParamModifier.NOT && operation == null) {
|
||||
operation = SearchFilterParser.CompareOperation.ne;
|
||||
|
|
|
@ -15,6 +15,7 @@ import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
|
|||
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboTokensNonUniqueDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamDateDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
|
||||
|
@ -94,6 +95,7 @@ import javax.persistence.EntityManager;
|
|||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
@ -168,6 +170,8 @@ public abstract class BaseJpaTest extends BaseTest {
|
|||
@Autowired
|
||||
protected IResourceIndexedSearchParamTokenDao myResourceIndexedSearchParamTokenDao;
|
||||
@Autowired
|
||||
protected IResourceIndexedSearchParamStringDao myResourceIndexedSearchParamStringDao;
|
||||
@Autowired
|
||||
protected IResourceIndexedSearchParamDateDao myResourceIndexedSearchParamDateDao;
|
||||
@Autowired
|
||||
protected IResourceIndexedComboTokensNonUniqueDao myResourceIndexedComboTokensNonUniqueDao;
|
||||
|
@ -346,6 +350,19 @@ public abstract class BaseJpaTest extends BaseTest {
|
|||
});
|
||||
}
|
||||
|
||||
protected void logAllStringIndexes(String... theParamNames) {
|
||||
String messageSuffix = theParamNames.length > 0 ? " containing " + Arrays.asList(theParamNames) : "";
|
||||
runInTransaction(() -> {
|
||||
String message = myResourceIndexedSearchParamStringDao
|
||||
.findAll()
|
||||
.stream()
|
||||
.filter(t->theParamNames.length == 0 ? true : Arrays.asList(theParamNames).contains(t.getParamName()))
|
||||
.map(t -> t.toString())
|
||||
.collect(Collectors.joining("\n * "));
|
||||
ourLog.info("String indexes{}:\n * {}", messageSuffix, message);
|
||||
});
|
||||
}
|
||||
|
||||
protected void logAllResourceTags() {
|
||||
runInTransaction(() -> {
|
||||
ourLog.info("Token tags:\n * {}", myResourceTagDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
|
||||
|
|
|
@ -1193,8 +1193,12 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
|
|||
|
||||
String url = "/Observation?subject.organization.partof.name=HealthCo";
|
||||
|
||||
logAllStringIndexes();
|
||||
|
||||
// execute
|
||||
myCaptureQueriesListener.clear();
|
||||
List<String> oids = searchAndReturnUnqualifiedVersionlessIdValues(url);
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
|
||||
// validate
|
||||
assertEquals(1L, oids.size());
|
||||
|
|
|
@ -303,7 +303,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
List<ResourceTable> resources = myResourceTableDao.findAll();
|
||||
String versions = "Resource Versions:\n * " + resources
|
||||
.stream()
|
||||
.map(t->"Resource " + t.getIdDt() + " has version: " + t.getVersion())
|
||||
.map(t -> "Resource " + t.getIdDt() + " has version: " + t.getVersion())
|
||||
.collect(Collectors.joining("\n * "));
|
||||
|
||||
for (ResourceTable next : resources) {
|
||||
|
@ -368,7 +368,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
List<ResourceTable> resources = myResourceTableDao.findAll();
|
||||
String versions = "Resource Versions:\n * " + resources
|
||||
.stream()
|
||||
.map(t->"Resource " + t.getIdDt() + " has version: " + t.getVersion())
|
||||
.map(t -> "Resource " + t.getIdDt() + " has version: " + t.getVersion())
|
||||
.collect(Collectors.joining("\n * "));
|
||||
|
||||
for (ResourceTable next : resources) {
|
||||
|
@ -554,9 +554,9 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
public void assertNoPartitionSelectors() {
|
||||
List<SqlQuery> selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
|
||||
for (SqlQuery next : selectQueries) {
|
||||
assertEquals(0, StringUtils.countMatches(next.getSql(true, true).toLowerCase(), "partition_id is null"));
|
||||
assertEquals(0, StringUtils.countMatches(next.getSql(true, true).toLowerCase(), "partition_id="));
|
||||
assertEquals(0, StringUtils.countMatches(next.getSql(true, true).toLowerCase(), "partition_id ="));
|
||||
assertEquals(0, StringUtils.countMatches(next.getSql(true, true).toLowerCase(), "partition_id is null"), () -> next.getSql(true, true));
|
||||
assertEquals(0, StringUtils.countMatches(next.getSql(true, true).toLowerCase(), "partition_id="), () -> next.getSql(true, true));
|
||||
assertEquals(0, StringUtils.countMatches(next.getSql(true, true).toLowerCase(), "partition_id ="), () -> next.getSql(true, true));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -125,7 +125,6 @@ import org.hl7.fhir.r4.model.Substance;
|
|||
import org.hl7.fhir.r4.model.Task;
|
||||
import org.hl7.fhir.r4.model.Timing;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
|
@ -170,6 +169,7 @@ import static org.hamcrest.Matchers.hasSize;
|
|||
import static org.hamcrest.Matchers.not;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
@ -193,9 +193,11 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
myDaoConfig.setAllowContainsSearches(new DaoConfig().isAllowContainsSearches());
|
||||
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
|
||||
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
|
||||
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
|
||||
|
||||
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
|
||||
myModelConfig.setAutoSupportDefaultSearchParams(true);
|
||||
myModelConfig.setIndexIdentifierOfType(new ModelConfig().isIndexIdentifierOfType());
|
||||
|
||||
mySearchParamRegistry.resetForUnitTest();
|
||||
}
|
||||
|
||||
|
@ -3793,6 +3795,8 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
patient.addIdentifier().setSystem("urn:system").setValue("TOKENB");
|
||||
String idBoth = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getValue();
|
||||
|
||||
logAllTokenIndexes();
|
||||
|
||||
patient = new Patient();
|
||||
patient.addIdentifier().setSystem("urn:system").setValue("TOKENA");
|
||||
String idA = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getValue();
|
||||
|
@ -5135,6 +5139,74 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
runInTransaction(() -> assertEquals(2, mySearchEntityDao.count()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTokenOfType() {
|
||||
myModelConfig.setIndexIdentifierOfType(true);
|
||||
|
||||
Patient patient = new Patient();
|
||||
patient
|
||||
.addIdentifier()
|
||||
.setSystem("http://foo1")
|
||||
.setValue("bar1")
|
||||
.getType()
|
||||
.addCoding()
|
||||
.setSystem("http://terminology.hl7.org/CodeSystem/v2-0203")
|
||||
.setCode("MR");
|
||||
IIdType id1 = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
|
||||
|
||||
runInTransaction(()->{
|
||||
List<ResourceIndexedSearchParamToken> params = myResourceIndexedSearchParamTokenDao
|
||||
.findAll()
|
||||
.stream()
|
||||
.filter(t -> t.getParamName().equals("identifier:of-type"))
|
||||
.collect(Collectors.toList());
|
||||
assertEquals(1, params.size());
|
||||
assertNotNull(params.get(0).getHashSystemAndValue());
|
||||
assertNull(params.get(0).getHashSystem());
|
||||
assertNull(params.get(0).getHashValue());
|
||||
|
||||
});
|
||||
|
||||
// Shouldn't match
|
||||
patient = new Patient();
|
||||
patient
|
||||
.addIdentifier()
|
||||
.setSystem("http://terminology.hl7.org/CodeSystem/v2-0203")
|
||||
.setValue("MR|bar1");
|
||||
myPatientDao.create(patient);
|
||||
|
||||
TokenParam param = new TokenParam()
|
||||
.setSystem("http://terminology.hl7.org/CodeSystem/v2-0203")
|
||||
.setValue("MR|bar1")
|
||||
.setModifier(TokenParamModifier.OF_TYPE);
|
||||
SearchParameterMap map = SearchParameterMap.newSynchronous("identifier", param);
|
||||
|
||||
logAllTokenIndexes();
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
IBundleProvider outcome = myPatientDao.search(map, mySrd);
|
||||
List<IIdType> ids = toUnqualifiedVersionlessIds(outcome);
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
|
||||
assertThat(ids, contains(id1));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTokenOfType_Disabled() {
|
||||
try {
|
||||
TokenParam param = new TokenParam()
|
||||
.setSystem("http://terminology.hl7.org/CodeSystem/v2-0203")
|
||||
.setValue("MR|bar1")
|
||||
.setModifier(TokenParamModifier.OF_TYPE);
|
||||
SearchParameterMap map = SearchParameterMap.newSynchronous("identifier", param);
|
||||
myPatientDao.search(map, mySrd);
|
||||
fail();
|
||||
} catch (MethodNotAllowedException e) {
|
||||
assertEquals("The :of-type modifier is not enabled on this server", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTokenTextDisabled_Global() {
|
||||
myModelConfig.setSuppressStringIndexingInTokens(true);
|
||||
|
|
|
@ -1,9 +1,7 @@
|
|||
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
|
||||
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
|
||||
import ca.uhn.fhir.jpa.entity.TermValueSet;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
|
||||
|
@ -102,7 +100,6 @@ import org.junit.jupiter.api.BeforeEach;
|
|||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.test.context.TestPropertySource;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
|
|
|
@ -387,13 +387,6 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
|
|||
assertObservationSearchMatches(":text matches code.display", map, id2);
|
||||
}
|
||||
|
||||
{
|
||||
// Identifier Type
|
||||
SearchParameterMap map = new SearchParameterMap();
|
||||
map.add("identifier", new TokenParam("Random").setModifier(TokenParamModifier.TEXT));
|
||||
assertObservationSearchMatches(":text matches identifier text", map, id4);
|
||||
}
|
||||
|
||||
{
|
||||
// multiple values means or
|
||||
SearchParameterMap map = new SearchParameterMap();
|
||||
|
|
|
@ -1318,8 +1318,11 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
|
||||
@Test
|
||||
public void testSearch_IdParamSecond_ForcedId_SpecificPartition() {
|
||||
IIdType patientIdNull = createPatient(withPartition(null), withId("PT-NULL"), withActiveTrue());
|
||||
// FIXME: move down
|
||||
IIdType patientId1 = createPatient(withPartition(1), withId("PT-1"), withActiveTrue());
|
||||
logAllTokenIndexes();
|
||||
|
||||
IIdType patientIdNull = createPatient(withPartition(null), withId("PT-NULL"), withActiveTrue());
|
||||
IIdType patientId2 = createPatient(withPartition(2), withId("PT-2"), withActiveTrue());
|
||||
|
||||
/* *******************************
|
||||
|
@ -2198,6 +2201,8 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
IIdType patientId1 = createPatient(withPartition(1), withFamily("FAMILY"));
|
||||
createPatient(withPartition(2), withFamily("FAMILY"));
|
||||
|
||||
logAllStringIndexes();
|
||||
|
||||
addReadPartition(1);
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
|
@ -2207,7 +2212,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
|
|||
IBundleProvider results = myPatientDao.search(map, mySrd);
|
||||
List<IIdType> ids = toUnqualifiedVersionlessIds(results);
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertThat(ids, contains(patientId1));
|
||||
assertThat(ids.toString(), ids, contains(patientId1));
|
||||
|
||||
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
|
||||
ourLog.info("Search SQL:\n{}", searchSql);
|
||||
|
|
|
@ -224,57 +224,6 @@ public class SearchParamExtractorR4Test {
|
|||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testTokenText_Enabled_Identifier() {
|
||||
Observation obs = new Observation();
|
||||
obs.addIdentifier().setSystem("sys").setValue("val").getType().setText("Help Im a Bug");
|
||||
|
||||
SearchParamExtractorR4 extractor = new SearchParamExtractorR4(new ModelConfig(), myPartitionSettings, ourCtx, mySearchParamRegistry);
|
||||
|
||||
List<BaseResourceIndexedSearchParam> tokens = extractor.extractSearchParamTokens(obs)
|
||||
.stream()
|
||||
.filter(t -> t.getParamName().equals("identifier"))
|
||||
.sorted(comparing(o -> o.getClass().getName()).reversed())
|
||||
.collect(Collectors.toList());
|
||||
assertEquals(2, tokens.size());
|
||||
|
||||
ResourceIndexedSearchParamToken token = (ResourceIndexedSearchParamToken) tokens.get(0);
|
||||
assertEquals("identifier", token.getParamName());
|
||||
assertEquals("sys", token.getSystem());
|
||||
assertEquals("val", token.getValue());
|
||||
|
||||
ResourceIndexedSearchParamString string = (ResourceIndexedSearchParamString) tokens.get(1);
|
||||
assertEquals("identifier", string.getParamName());
|
||||
assertEquals("Help Im a Bug", string.getValueExact());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTokenText_DisabledInSearchParam_Identifier() {
|
||||
RuntimeSearchParam existingCodeSp = mySearchParamRegistry.getActiveSearchParams("Observation").get("identifier");
|
||||
RuntimeSearchParam codeSearchParam = new RuntimeSearchParam(existingCodeSp);
|
||||
codeSearchParam.addExtension(HapiExtensions.EXT_SEARCHPARAM_TOKEN_SUPPRESS_TEXT_INDEXING, new Extension(HapiExtensions.EXT_SEARCHPARAM_TOKEN_SUPPRESS_TEXT_INDEXING, new BooleanType(true)));
|
||||
|
||||
mySearchParamRegistry.addSearchParam(codeSearchParam);
|
||||
|
||||
Observation obs = new Observation();
|
||||
obs.addIdentifier().setSystem("sys").setValue("val").getType().setText("Help Im a Bug");
|
||||
|
||||
SearchParamExtractorR4 extractor = new SearchParamExtractorR4(new ModelConfig(), myPartitionSettings, ourCtx, mySearchParamRegistry);
|
||||
|
||||
List<BaseResourceIndexedSearchParam> tokens = extractor.extractSearchParamTokens(obs)
|
||||
.stream()
|
||||
.filter(t -> t.getParamName().equals("identifier"))
|
||||
.sorted(comparing(o -> o.getClass().getName()).reversed())
|
||||
.collect(Collectors.toList());
|
||||
assertEquals(1, tokens.size());
|
||||
|
||||
ResourceIndexedSearchParamToken token = (ResourceIndexedSearchParamToken) tokens.get(0);
|
||||
assertEquals("identifier", token.getParamName());
|
||||
assertEquals("sys", token.getSystem());
|
||||
assertEquals("val", token.getValue());
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReferenceWithResolve() {
|
||||
Encounter enc = new Encounter();
|
||||
|
@ -394,6 +343,49 @@ public class SearchParamExtractorR4Test {
|
|||
assertEquals(2, list.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExtractIdentifierOfType() {
|
||||
|
||||
ModelConfig modelConfig = new ModelConfig();
|
||||
modelConfig.setIndexIdentifierOfType(true);
|
||||
|
||||
Patient patient = new Patient();
|
||||
patient
|
||||
.addIdentifier()
|
||||
.setSystem("http://foo1")
|
||||
.setValue("bar1")
|
||||
.getType()
|
||||
.addCoding()
|
||||
.setSystem("http://terminology.hl7.org/CodeSystem/v2-0203")
|
||||
.setCode("MR");
|
||||
patient
|
||||
.addIdentifier()
|
||||
.setSystem("http://foo2")
|
||||
.setValue("bar2")
|
||||
.getType()
|
||||
.addCoding()
|
||||
.setSystem("http://terminology.hl7.org/CodeSystem/v2-0203")
|
||||
.setCode("MR");
|
||||
|
||||
SearchParamExtractorR4 extractor = new SearchParamExtractorR4(modelConfig, new PartitionSettings(), ourCtx, mySearchParamRegistry);
|
||||
List<ResourceIndexedSearchParamToken> list = extractor
|
||||
.extractSearchParamTokens(patient)
|
||||
.stream()
|
||||
.map(t->(ResourceIndexedSearchParamToken)t)
|
||||
.collect(Collectors.toList());
|
||||
list.forEach(t->t.calculateHashes());
|
||||
ourLog.info("Found tokens:\n * {}", list.stream().map(t->t.toString()).collect(Collectors.joining("\n * ")));
|
||||
|
||||
assertThat(list, containsInAnyOrder(
|
||||
new ResourceIndexedSearchParamToken(new PartitionSettings(), "Patient", "deceased", null, "false"),
|
||||
new ResourceIndexedSearchParamToken(new PartitionSettings(), "Patient", "identifier", "http://foo1", "bar1"),
|
||||
new ResourceIndexedSearchParamToken(new PartitionSettings(), "Patient", "identifier", "http://foo2", "bar2"),
|
||||
new ResourceIndexedSearchParamToken(new PartitionSettings(), "Patient", "identifier:of-type", "http://terminology.hl7.org/CodeSystem/v2-0203", "MR|bar1"),
|
||||
new ResourceIndexedSearchParamToken(new PartitionSettings(), "Patient", "identifier:of-type", "http://terminology.hl7.org/CodeSystem/v2-0203", "MR|bar2")
|
||||
));
|
||||
|
||||
}
|
||||
|
||||
private static class MySearchParamRegistry implements ISearchParamRegistry, ISearchParamRegistryController {
|
||||
|
||||
|
||||
|
|
|
@ -859,6 +859,7 @@ public class ResourceProviderR4SearchContainedTest extends BaseResourceProviderR
|
|||
obs.getSubject().setReference("#patient1");
|
||||
|
||||
oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
|
||||
logAllStringIndexes("subject.family");
|
||||
|
||||
ourLog.info("Input: {}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(obs));
|
||||
|
||||
|
@ -877,7 +878,9 @@ public class ResourceProviderR4SearchContainedTest extends BaseResourceProviderR
|
|||
}
|
||||
|
||||
// -- update
|
||||
oid1 = myObservationDao.update(createdObs, mySrd).getId().toUnqualifiedVersionless();
|
||||
myObservationDao.update(createdObs, mySrd).getId().toUnqualifiedVersionless();
|
||||
logAllStringIndexes("subject.family");
|
||||
|
||||
}
|
||||
|
||||
{
|
||||
|
@ -891,10 +894,12 @@ public class ResourceProviderR4SearchContainedTest extends BaseResourceProviderR
|
|||
obs.getSubject().setReference("#patient1");
|
||||
|
||||
myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
|
||||
logAllStringIndexes("subject.family");
|
||||
|
||||
ourLog.info("Input: {}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(obs));
|
||||
}
|
||||
|
||||
|
||||
{
|
||||
Patient p = new Patient();
|
||||
p.setId("patient1");
|
||||
|
|
|
@ -1,12 +1,15 @@
|
|||
package ca.uhn.fhir.jpa.provider.r4;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
|
@ -45,6 +48,8 @@ public class ResourceProviderSearchModifierR4Test extends BaseResourceProviderR4
|
|||
myDaoConfig.setAllowContainsSearches(new DaoConfig().isAllowContainsSearches());
|
||||
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
|
||||
|
||||
myModelConfig.setIndexIdentifierOfType(new ModelConfig().isIndexIdentifierOfType());
|
||||
|
||||
myClient.unregisterInterceptor(myCapturingInterceptor);
|
||||
}
|
||||
|
||||
|
@ -199,7 +204,75 @@ public class ResourceProviderSearchModifierR4Test extends BaseResourceProviderR4
|
|||
assertEquals(obsList.get(5).toString(), ids.get(3));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSearch_OfType_PartialBlocked() {
|
||||
myModelConfig.setIndexIdentifierOfType(true);
|
||||
|
||||
try {
|
||||
String uri = ourServerBase + "/Patient?identifier:of-type=A";
|
||||
myClient.search().byUrl(uri).execute();
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("HTTP 400 Bad Request: Invalid parameter value for :of-type query", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
String uri = ourServerBase + "/Patient?identifier:of-type=A|B";
|
||||
myClient.search().byUrl(uri).execute();
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("HTTP 400 Bad Request: Invalid parameter value for :of-type query", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
String uri = ourServerBase + "/Patient?identifier:of-type=A|B|";
|
||||
myClient.search().byUrl(uri).execute();
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("HTTP 400 Bad Request: Invalid parameter value for :of-type query", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
String uri = ourServerBase + "/Patient?identifier:of-type=|B|C";
|
||||
myClient.search().byUrl(uri).execute();
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("HTTP 400 Bad Request: Invalid parameter value for :of-type query", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
String uri = ourServerBase + "/Patient?identifier:of-type=||C";
|
||||
myClient.search().byUrl(uri).execute();
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("HTTP 400 Bad Request: Invalid parameter value for :of-type query", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
String uri = ourServerBase + "/Patient?identifier:of-type=|B|";
|
||||
myClient.search().byUrl(uri).execute();
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("HTTP 400 Bad Request: Invalid parameter value for :of-type query", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
String uri = ourServerBase + "/Patient?identifier:of-type=A||";
|
||||
myClient.search().byUrl(uri).execute();
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("HTTP 400 Bad Request: Invalid parameter value for :of-type query", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
String uri = ourServerBase + "/Patient?identifier:of-type=||";
|
||||
myClient.search().byUrl(uri).execute();
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals("HTTP 400 Bad Request: Invalid parameter value for :of-type query", e.getMessage());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private List<String> searchAndReturnUnqualifiedVersionlessIdValues(String uri) throws IOException {
|
||||
List<String> ids;
|
||||
|
|
|
@ -182,7 +182,7 @@ public class GiantTransactionPerfTest {
|
|||
myTransactionProcessor.setModelConfig(myDaoConfig.getModelConfig());
|
||||
myTransactionProcessor.setHapiTransactionService(myHapiTransactionService);
|
||||
myTransactionProcessor.setDaoRegistry(myDaoRegistry);
|
||||
myTransactionProcessor.setPartitionSettingsForUnitTest(myPartitionSettings);
|
||||
myTransactionProcessor.setPartitionSettingsForUnitTest(this.myPartitionSettings);
|
||||
myTransactionProcessor.setIdHelperServiceForUnitTest(myIdHelperService);
|
||||
myTransactionProcessor.setFhirContextForUnitTest(myCtx);
|
||||
myTransactionProcessor.start();
|
||||
|
@ -190,6 +190,7 @@ public class GiantTransactionPerfTest {
|
|||
mySystemDao = new FhirSystemDaoR4();
|
||||
mySystemDao.setTransactionProcessorForUnitTest(myTransactionProcessor);
|
||||
mySystemDao.setDaoConfigForUnitTest(myDaoConfig);
|
||||
mySystemDao.setPartitionSettingsForUnitTest(myPartitionSettings);
|
||||
mySystemDao.start();
|
||||
|
||||
when(myAppCtx.getBean(eq(IInstanceValidatorModule.class))).thenReturn(myInstanceValidatorSvc);
|
||||
|
@ -228,7 +229,7 @@ public class GiantTransactionPerfTest {
|
|||
mySearchParamExtractor = new SearchParamExtractorR4();
|
||||
mySearchParamExtractor.setContext(myCtx);
|
||||
mySearchParamExtractor.setSearchParamRegistry(mySearchParamRegistry);
|
||||
mySearchParamExtractor.setPartitionSettings(myPartitionSettings);
|
||||
mySearchParamExtractor.setPartitionSettings(this.myPartitionSettings);
|
||||
mySearchParamExtractor.setModelConfig(myDaoConfig.getModelConfig());
|
||||
mySearchParamExtractor.start();
|
||||
|
||||
|
@ -243,7 +244,7 @@ public class GiantTransactionPerfTest {
|
|||
mySearchParamWithInlineReferencesExtractor = new SearchParamWithInlineReferencesExtractor();
|
||||
mySearchParamWithInlineReferencesExtractor.setDaoConfig(myDaoConfig);
|
||||
mySearchParamWithInlineReferencesExtractor.setContext(myCtx);
|
||||
mySearchParamWithInlineReferencesExtractor.setPartitionSettings(myPartitionSettings);
|
||||
mySearchParamWithInlineReferencesExtractor.setPartitionSettings(this.myPartitionSettings);
|
||||
mySearchParamWithInlineReferencesExtractor.setSearchParamExtractorService(mySearchParamExtractorSvc);
|
||||
mySearchParamWithInlineReferencesExtractor.setSearchParamRegistry(mySearchParamRegistry);
|
||||
mySearchParamWithInlineReferencesExtractor.setDaoSearchParamSynchronizer(myDaoSearchParamSynchronizer);
|
||||
|
@ -263,6 +264,7 @@ public class GiantTransactionPerfTest {
|
|||
myEobDao.setDaoSearchParamSynchronizer(myDaoSearchParamSynchronizer);
|
||||
myEobDao.setDaoConfigForUnitTest(myDaoConfig);
|
||||
myEobDao.setIdHelperSvcForUnitTest(myIdHelperService);
|
||||
myEobDao.setPartitionSettingsForUnitTest(myPartitionSettings);
|
||||
myEobDao.start();
|
||||
|
||||
myDaoRegistry.setResourceDaos(Lists.newArrayList(myEobDao));
|
||||
|
|
|
@ -20,6 +20,8 @@ package ca.uhn.fhir.jpa.model.entity;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import org.apache.commons.lang3.ObjectUtils;
|
||||
|
||||
import javax.persistence.MappedSuperclass;
|
||||
import java.io.Serializable;
|
||||
|
||||
|
@ -32,6 +34,16 @@ public abstract class BaseResourceIndex extends BasePartitionable implements Ser
|
|||
|
||||
public abstract void calculateHashes();
|
||||
|
||||
public abstract void clearHashes();
|
||||
|
||||
@Override
|
||||
public void setPartitionId(PartitionablePartitionId thePartitionId) {
|
||||
if (ObjectUtils.notEqual(getPartitionId(), thePartitionId)) {
|
||||
super.setPartitionId(thePartitionId);
|
||||
clearHashes();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Subclasses must implement
|
||||
*/
|
||||
|
|
|
@ -31,6 +31,7 @@ import com.google.common.hash.HashCode;
|
|||
import com.google.common.hash.HashFunction;
|
||||
import com.google.common.hash.Hasher;
|
||||
import com.google.common.hash.Hashing;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
|
||||
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
|
||||
|
||||
|
@ -97,7 +98,10 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex {
|
|||
}
|
||||
|
||||
public void setParamName(String theName) {
|
||||
myParamName = theName;
|
||||
if (!StringUtils.equals(myParamName, theName)) {
|
||||
myParamName = theName;
|
||||
clearHashes();
|
||||
}
|
||||
}
|
||||
|
||||
public ResourceTable getResource() {
|
||||
|
@ -222,6 +226,7 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex {
|
|||
}
|
||||
|
||||
HashCode hashCode = hasher.hash();
|
||||
return hashCode.asLong();
|
||||
long retVal = hashCode.asLong();
|
||||
return retVal;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -50,12 +50,7 @@ public class ModelConfig {
|
|||
* <li><code>"http://hl7.org/fhir/StructureDefinition/*"</code></li>
|
||||
* </ul>
|
||||
*/
|
||||
public static final Set<String> DEFAULT_LOGICAL_BASE_URLS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
|
||||
"http://hl7.org/fhir/ValueSet/*",
|
||||
"http://hl7.org/fhir/CodeSystem/*",
|
||||
"http://hl7.org/fhir/valueset-*",
|
||||
"http://hl7.org/fhir/codesystem-*",
|
||||
"http://hl7.org/fhir/StructureDefinition/*")));
|
||||
public static final Set<String> DEFAULT_LOGICAL_BASE_URLS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("http://hl7.org/fhir/ValueSet/*", "http://hl7.org/fhir/CodeSystem/*", "http://hl7.org/fhir/valueset-*", "http://hl7.org/fhir/codesystem-*", "http://hl7.org/fhir/StructureDefinition/*")));
|
||||
|
||||
public static final String DEFAULT_WEBSOCKET_CONTEXT_PATH = "/websocket";
|
||||
|
||||
|
@ -101,6 +96,7 @@ public class ModelConfig {
|
|||
private boolean myIndexOnContainedResourcesRecursively = false;
|
||||
private boolean myAllowMdmExpansion = false;
|
||||
private boolean myAutoSupportDefaultSearchParams = true;
|
||||
private boolean myIndexIdentifierOfType = false;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
|
@ -111,6 +107,32 @@ public class ModelConfig {
|
|||
setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
|
||||
}
|
||||
|
||||
/**
|
||||
* If set to <code>true</code> (default is <code>false</code>) the
|
||||
* <code>:of-type</code> modifier on token search parameters for
|
||||
* identifiers will be supported. Enabling this causes additional
|
||||
* indexing overhead (although very minor) so it is disabled unless it is
|
||||
* actually needed.
|
||||
*
|
||||
* @since 5.7.0
|
||||
*/
|
||||
public boolean isIndexIdentifierOfType() {
|
||||
return myIndexIdentifierOfType;
|
||||
}
|
||||
|
||||
/**
|
||||
* If set to <code>true</code> (default is <code>false</code>) the
|
||||
* <code>:of-type</code> modifier on token search parameters for
|
||||
* identifiers will be supported. Enabling this causes additional
|
||||
* indexing overhead (although very minor) so it is disabled unless it is
|
||||
* actually needed.
|
||||
*
|
||||
* @since 5.7.0
|
||||
*/
|
||||
public void setIndexIdentifierOfType(boolean theIndexIdentifierOfType) {
|
||||
myIndexIdentifierOfType = theIndexIdentifierOfType;
|
||||
}
|
||||
|
||||
/**
|
||||
* If set to {@code true} the default search params (i.e. the search parameters that are
|
||||
* defined by the FHIR specification itself) may be overridden by uploading search
|
||||
|
@ -816,7 +838,7 @@ public class ModelConfig {
|
|||
* Disabling this obviously reduces the options for searching however.
|
||||
* </p>
|
||||
*
|
||||
* @since 6.0.0
|
||||
* @since 5.7.0
|
||||
*/
|
||||
public boolean isAutoSupportDefaultSearchParams() {
|
||||
return myAutoSupportDefaultSearchParams;
|
||||
|
@ -832,7 +854,7 @@ public class ModelConfig {
|
|||
* Disabling this obviously reduces the options for searching however.
|
||||
* </p>
|
||||
*
|
||||
* @since 6.0.0
|
||||
* @since 5.7.0
|
||||
*/
|
||||
public void setAutoSupportDefaultSearchParams(boolean theAutoSupportDefaultSearchParams) {
|
||||
myAutoSupportDefaultSearchParams = theAutoSupportDefaultSearchParams;
|
||||
|
|
|
@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.model.entity;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import org.apache.commons.lang3.builder.EqualsBuilder;
|
||||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
|
@ -63,6 +65,21 @@ public class PartitionablePartitionId implements Cloneable {
|
|||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object theO) {
|
||||
if (!(theO instanceof PartitionablePartitionId)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PartitionablePartitionId that = (PartitionablePartitionId) theO;
|
||||
return new EqualsBuilder().append(myPartitionId, that.myPartitionId).append(myPartitionDate, that.myPartitionDate).isEquals();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return new HashCodeBuilder(17, 37).append(myPartitionId).append(myPartitionDate).toHashCode();
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public LocalDate getPartitionDate() {
|
||||
return myPartitionDate;
|
||||
|
@ -85,6 +102,13 @@ public class PartitionablePartitionId implements Cloneable {
|
|||
return RequestPartitionId.fromPartitionId(getPartitionId(), getPartitionDate());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "[" +
|
||||
getPartitionId() +
|
||||
"]";
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public static RequestPartitionId toRequestPartitionId(@Nullable PartitionablePartitionId theRequestPartitionId) {
|
||||
if (theRequestPartitionId != null) {
|
||||
|
|
|
@ -82,6 +82,7 @@ public class ResourceIndexedComboTokenNonUnique extends BaseResourceIndex implem
|
|||
myPartitionSettings = thePartitionSettings;
|
||||
myResource = theEntity;
|
||||
myIndexString = theQueryString;
|
||||
calculateHashes();
|
||||
}
|
||||
|
||||
public String getIndexString() {
|
||||
|
@ -125,8 +126,17 @@ public class ResourceIndexedComboTokenNonUnique extends BaseResourceIndex implem
|
|||
myId = theId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearHashes() {
|
||||
myHashComplete = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void calculateHashes() {
|
||||
if (myHashComplete != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
PartitionSettings partitionSettings = getPartitionSettings();
|
||||
PartitionablePartitionId partitionId = getPartitionId();
|
||||
String queryString = myIndexString;
|
||||
|
|
|
@ -60,12 +60,26 @@ public abstract class ResourceIndexedSearchParamBaseQuantity extends BaseResourc
|
|||
@Column(name = "HASH_IDENTITY", nullable = true)
|
||||
private Long myHashIdentity;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
public ResourceIndexedSearchParamBaseQuantity() {
|
||||
super();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearHashes() {
|
||||
myHashIdentity = null;
|
||||
myHashIdentityAndUnits = null;
|
||||
myHashIdentitySystemAndUnits = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void calculateHashes() {
|
||||
if (myHashIdentity != null || myHashIdentityAndUnits != null || myHashIdentitySystemAndUnits != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
String resourceType = getResourceType();
|
||||
String paramName = getParamName();
|
||||
String units = getUnits();
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
|
|||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Embeddable;
|
||||
|
@ -79,8 +78,17 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
|
|||
calculateHashes();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearHashes() {
|
||||
myHashIdentity = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void calculateHashes() {
|
||||
if (myHashIdentity != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
String resourceType = getResourceType();
|
||||
String paramName = getParamName();
|
||||
setHashIdentity(calculateHashIdentity(getPartitionSettings(), getPartitionId(), resourceType, paramName));
|
||||
|
|
|
@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.model.entity;
|
|||
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
|
||||
import javax.persistence.Column;
|
||||
|
@ -129,7 +128,6 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
|
|||
reComputeValueHighDate(theHigh, theHighString);
|
||||
myOriginalValue = theOriginalValue;
|
||||
calculateHashes();
|
||||
|
||||
}
|
||||
|
||||
private void computeValueHighDateOrdinal(String theHigh) {
|
||||
|
@ -202,8 +200,17 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
|
|||
myHashIdentity = source.myHashIdentity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearHashes() {
|
||||
myHashIdentity = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void calculateHashes() {
|
||||
if (myHashIdentity != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
String resourceType = getResourceType();
|
||||
String paramName = getParamName();
|
||||
setHashIdentity(calculateHashIdentity(getPartitionSettings(), getPartitionId(), resourceType, paramName));
|
||||
|
|
|
@ -86,9 +86,16 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
|
|||
myHashIdentity = source.myHashIdentity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearHashes() {
|
||||
myHashIdentity = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void calculateHashes() {
|
||||
if (myHashIdentity != null) {
|
||||
return;
|
||||
}
|
||||
String resourceType = getResourceType();
|
||||
String paramName = getParamName();
|
||||
setHashIdentity(calculateHashIdentity(getPartitionSettings(), getPartitionId(), resourceType, paramName));
|
||||
|
|
|
@ -39,7 +39,6 @@ import javax.persistence.Table;
|
|||
import org.apache.commons.lang3.builder.EqualsBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
|
||||
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ScaledNumberField;
|
||||
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
|
|
|
@ -29,10 +29,6 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
|
|||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
import org.hibernate.search.engine.backend.types.Projectable;
|
||||
import org.hibernate.search.engine.backend.types.Searchable;
|
||||
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
|
||||
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Embeddable;
|
||||
|
@ -132,8 +128,20 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
|
|||
myHashNormalizedPrefix = source.myHashNormalizedPrefix;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearHashes() {
|
||||
myHashIdentity = null;
|
||||
myHashNormalizedPrefix = null;
|
||||
myHashExact = null;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void calculateHashes() {
|
||||
if (myHashIdentity != null || myHashExact != null || myHashNormalizedPrefix != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
String resourceType = getResourceType();
|
||||
String paramName = getParamName();
|
||||
String valueNormalized = getValueNormalized();
|
||||
|
@ -245,6 +253,7 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
|
|||
b.append("resourceId", getResourcePid());
|
||||
b.append("hashNormalizedPrefix", getHashNormalizedPrefix());
|
||||
b.append("valueNormalized", getValueNormalized());
|
||||
b.append("partitionId", getPartitionId());
|
||||
return b.build();
|
||||
}
|
||||
|
||||
|
@ -284,7 +293,8 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
|
|||
hashPrefixLength = 0;
|
||||
}
|
||||
|
||||
return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, StringUtil.left(theValueNormalized, hashPrefixLength));
|
||||
String value = StringUtil.left(theValueNormalized, hashPrefixLength);
|
||||
return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, value);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.model.entity;
|
|||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.builder.EqualsBuilder;
|
||||
|
@ -54,7 +55,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
|
|||
* IDX_SP_TOKEN_UNQUAL
|
||||
*/
|
||||
|
||||
// TODO PERF Recommend to drop this index:
|
||||
// TODO PERF Recommend to drop this index (added by JA - I don't actually think we even need the identity hash for this type, we could potentially drop the column too):
|
||||
@Index(name = "IDX_SP_TOKEN_HASH", columnList = "HASH_IDENTITY"),
|
||||
@Index(name = "IDX_SP_TOKEN_HASH_S", columnList = "HASH_SYS"),
|
||||
@Index(name = "IDX_SP_TOKEN_HASH_SV", columnList = "HASH_SYS_AND_VALUE"),
|
||||
|
@ -140,17 +141,35 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
|
|||
myHashIdentity = source.myHashIdentity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearHashes() {
|
||||
myHashIdentity = null;
|
||||
myHashSystem = null;
|
||||
myHashSystemAndValue = null;
|
||||
myHashValue = null;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void calculateHashes() {
|
||||
if (myHashIdentity != null || myHashSystem != null || myHashValue != null || myHashSystemAndValue != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
String resourceType = getResourceType();
|
||||
String paramName = getParamName();
|
||||
String system = getSystem();
|
||||
String value = getValue();
|
||||
setHashIdentity(calculateHashIdentity(getPartitionSettings(), getPartitionId(), resourceType, paramName));
|
||||
setHashSystem(calculateHashSystem(getPartitionSettings(), getPartitionId(), resourceType, paramName, system));
|
||||
setHashSystemAndValue(calculateHashSystemAndValue(getPartitionSettings(), getPartitionId(), resourceType, paramName, system, value));
|
||||
setHashValue(calculateHashValue(getPartitionSettings(), getPartitionId(), resourceType, paramName, value));
|
||||
|
||||
// Searches using the :of-type modifier can never be partial (system-only or value-only) so don't
|
||||
// bother saving these
|
||||
boolean calculatePartialHashes = !StringUtils.endsWith(paramName, Constants.PARAMQUALIFIER_TOKEN_OF_TYPE);
|
||||
if (calculatePartialHashes) {
|
||||
setHashSystem(calculateHashSystem(getPartitionSettings(), getPartitionId(), resourceType, paramName, system));
|
||||
setHashValue(calculateHashValue(getPartitionSettings(), getPartitionId(), resourceType, paramName, value));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -172,7 +191,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
|
|||
return b.isEquals();
|
||||
}
|
||||
|
||||
Long getHashSystem() {
|
||||
public Long getHashSystem() {
|
||||
return myHashSystem;
|
||||
}
|
||||
|
||||
|
@ -216,6 +235,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
|
|||
|
||||
public void setSystem(String theSystem) {
|
||||
mySystem = StringUtils.defaultIfBlank(theSystem, null);
|
||||
myHashSystemAndValue = null;
|
||||
}
|
||||
|
||||
public String getValue() {
|
||||
|
@ -224,6 +244,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
|
|||
|
||||
public ResourceIndexedSearchParamToken setValue(String theValue) {
|
||||
myValue = StringUtils.defaultIfBlank(theValue, null);
|
||||
myHashSystemAndValue = null;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -261,6 +282,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
|
|||
b.append("hashSystem", myHashSystem);
|
||||
b.append("hashValue", myHashValue);
|
||||
b.append("hashSysAndValue", myHashSystemAndValue);
|
||||
b.append("partition", getPartitionId());
|
||||
return b.build();
|
||||
}
|
||||
|
||||
|
@ -292,6 +314,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
|
|||
return retVal;
|
||||
}
|
||||
|
||||
|
||||
public static long calculateHashSystem(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, String theResourceType, String theParamName, String theSystem) {
|
||||
RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId);
|
||||
return calculateHashSystem(thePartitionSettings, requestPartitionId, theResourceType, theParamName, theSystem);
|
||||
|
|
|
@ -107,9 +107,19 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
|
|||
myHashIdentity = source.myHashIdentity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearHashes() {
|
||||
myHashIdentity = null;
|
||||
myHashUri = null;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void calculateHashes() {
|
||||
if (myHashIdentity != null || myHashUri != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
String resourceType = getResourceType();
|
||||
String paramName = getParamName();
|
||||
String uri = getUri();
|
||||
|
|
|
@ -235,6 +235,11 @@ public class ResourceLink extends BaseResourceIndex {
|
|||
myId = theId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearHashes() {
|
||||
// nothing right now
|
||||
}
|
||||
|
||||
@Override
|
||||
public void calculateHashes() {
|
||||
// nothing right now
|
||||
|
|
|
@ -42,6 +42,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
|
|||
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParamConstants;
|
||||
import ca.uhn.fhir.model.primitive.BoundCodeDt;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
|
@ -88,7 +89,6 @@ import java.util.stream.Collectors;
|
|||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.strip;
|
||||
import static org.apache.commons.lang3.StringUtils.trim;
|
||||
|
||||
public abstract class BaseSearchParamExtractor implements ISearchParamExtractor {
|
||||
|
@ -642,20 +642,35 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
high.ifPresent(theIBase -> addQuantity_QuantityNormalized(theResourceType, theParams, theSearchParam, theIBase));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void addToken_Identifier(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||
String system = extractValueAsString(myIdentifierSystemValueChild, theValue);
|
||||
String value = extractValueAsString(myIdentifierValueValueChild, theValue);
|
||||
if (isNotBlank(value)) {
|
||||
createTokenIndexIfNotBlank(theResourceType, theParams, theSearchParam, system, value);
|
||||
}
|
||||
createTokenIndexIfNotBlankAndAdd(theResourceType, theParams, theSearchParam, system, value);
|
||||
|
||||
if (shouldIndexTextComponentOfToken(theSearchParam)) {
|
||||
Optional<IBase> type = myIdentifierTypeValueChild.getAccessor().getFirstValueOrNull(theValue);
|
||||
if (type.isPresent()) {
|
||||
String text = extractValueAsString(myIdentifierTypeTextValueChild, type.get());
|
||||
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, text);
|
||||
boolean indexIdentifierType = myModelConfig.isIndexIdentifierOfType();
|
||||
if (indexIdentifierType) {
|
||||
Optional<IBase> type = myIdentifierTypeValueChild.getAccessor().getFirstValueOrNull(theValue);
|
||||
if (type.isPresent()) {
|
||||
List<IBase> codings = myCodeableConceptCodingValueChild.getAccessor().getValues(type.get());
|
||||
for (IBase nextCoding : codings) {
|
||||
|
||||
String typeSystem = myCodingSystemValueChild.getAccessor().getFirstValueOrNull(nextCoding).map(t -> ((IPrimitiveType<String>) t).getValue()).orElse(null);
|
||||
String typeValue = myCodingCodeValueChild.getAccessor().getFirstValueOrNull(nextCoding).map(t -> ((IPrimitiveType<String>) t).getValue()).orElse(null);
|
||||
if (isNotBlank(typeSystem) && isNotBlank(typeValue)) {
|
||||
String paramName = theSearchParam.getName() + Constants.PARAMQUALIFIER_TOKEN_OF_TYPE;
|
||||
ResourceIndexedSearchParamToken token = createTokenIndexIfNotBlank(theResourceType, typeSystem, typeValue + "|" + value, paramName);
|
||||
if (token != null) {
|
||||
theParams.add(token);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected boolean shouldIndexTextComponentOfToken(RuntimeSearchParam theSearchParam) {
|
||||
|
@ -733,7 +748,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
private void addToken_ContactPoint(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||
String system = extractValueAsString(myContactPointSystemValueChild, theValue);
|
||||
String value = extractValueAsString(myContactPointValueValueChild, theValue);
|
||||
createTokenIndexIfNotBlank(theResourceType, theParams, theSearchParam, system, value);
|
||||
createTokenIndexIfNotBlankAndAdd(theResourceType, theParams, theSearchParam, system, value);
|
||||
}
|
||||
|
||||
private void addToken_PatientCommunication(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||
|
@ -1062,7 +1077,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
}
|
||||
}
|
||||
|
||||
private void createTokenIndexIfNotBlank(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, String theSystem, String theValue) {
|
||||
private void createTokenIndexIfNotBlankAndAdd(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, String theSystem, String theValue) {
|
||||
ResourceIndexedSearchParamToken nextEntity = createTokenIndexIfNotBlank(theResourceType, theSearchParam, theSystem, theValue);
|
||||
if (nextEntity != null) {
|
||||
theParams.add(nextEntity);
|
||||
|
@ -1075,6 +1090,11 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
}
|
||||
|
||||
private ResourceIndexedSearchParamToken createTokenIndexIfNotBlank(String theResourceType, RuntimeSearchParam theSearchParam, String theSystem, String theValue) {
|
||||
String searchParamName = theSearchParam.getName();
|
||||
return createTokenIndexIfNotBlank(theResourceType, theSystem, theValue, searchParamName);
|
||||
}
|
||||
|
||||
private ResourceIndexedSearchParamToken createTokenIndexIfNotBlank(String theResourceType, String theSystem, String theValue, String searchParamName) {
|
||||
String system = theSystem;
|
||||
String value = theValue;
|
||||
ResourceIndexedSearchParamToken nextEntity = null;
|
||||
|
@ -1086,7 +1106,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
value = value.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
|
||||
}
|
||||
|
||||
nextEntity = new ResourceIndexedSearchParamToken(myPartitionSettings, theResourceType, theSearchParam.getName(), system, value);
|
||||
nextEntity = new ResourceIndexedSearchParamToken(myPartitionSettings, theResourceType, searchParamName, system, value);
|
||||
}
|
||||
|
||||
return nextEntity;
|
||||
|
@ -1239,6 +1259,93 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
|
||||
}
|
||||
|
||||
/**
|
||||
* Note that this should only be called for R4+ servers. Prior to
|
||||
* R4 the paths could be separated by the word "or" or by a "|"
|
||||
* character, so we used a slower splitting mechanism.
|
||||
*/
|
||||
@Nonnull
|
||||
public static String[] splitPathsR4(@Nonnull String thePaths) {
|
||||
StringTokenizer tok = new StringTokenizer(thePaths, " |");
|
||||
tok.setTrimmerMatcher(new StringTrimmingTrimmerMatcher());
|
||||
return tok.getTokenArray();
|
||||
}
|
||||
|
||||
public static boolean tokenTextIndexingEnabledForSearchParam(ModelConfig theModelConfig, RuntimeSearchParam theSearchParam) {
|
||||
Optional<Boolean> noSuppressForSearchParam = theSearchParam.getExtensions(HapiExtensions.EXT_SEARCHPARAM_TOKEN_SUPPRESS_TEXT_INDEXING).stream()
|
||||
.map(IBaseExtension::getValue)
|
||||
.map(val -> (IPrimitiveType<?>) val)
|
||||
.map(IPrimitiveType::getValueAsString)
|
||||
.map(Boolean::parseBoolean)
|
||||
.findFirst();
|
||||
|
||||
//if the SP doesn't care, use the system default.
|
||||
if (!noSuppressForSearchParam.isPresent()) {
|
||||
return !theModelConfig.isSuppressStringIndexingInTokens();
|
||||
//If the SP does care, use its value.
|
||||
} else {
|
||||
boolean suppressForSearchParam = noSuppressForSearchParam.get();
|
||||
ourLog.trace("Text indexing for SearchParameter {}: {}", theSearchParam.getName(), suppressForSearchParam);
|
||||
return !suppressForSearchParam;
|
||||
}
|
||||
}
|
||||
|
||||
private static void addIgnoredType(FhirContext theCtx, String theType, Set<String> theIgnoredTypes) {
|
||||
BaseRuntimeElementDefinition<?> elementDefinition = theCtx.getElementDefinition(theType);
|
||||
if (elementDefinition != null) {
|
||||
theIgnoredTypes.add(elementDefinition.getName());
|
||||
}
|
||||
}
|
||||
|
||||
protected static String extractValueAsString(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
|
||||
return theChildDefinition
|
||||
.getAccessor()
|
||||
.<IPrimitiveType<?>>getFirstValueOrNull(theElement)
|
||||
.map(IPrimitiveType::getValueAsString)
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
protected static Date extractValueAsDate(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
|
||||
return theChildDefinition
|
||||
.getAccessor()
|
||||
.<IPrimitiveType<Date>>getFirstValueOrNull(theElement)
|
||||
.map(IPrimitiveType::getValue)
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
protected static BigDecimal extractValueAsBigDecimal(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
|
||||
return theChildDefinition
|
||||
.getAccessor()
|
||||
.<IPrimitiveType<BigDecimal>>getFirstValueOrNull(theElement)
|
||||
.map(IPrimitiveType::getValue)
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected static List<IPrimitiveType<Date>> extractValuesAsFhirDates(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
|
||||
return (List) theChildDefinition
|
||||
.getAccessor()
|
||||
.getValues(theElement);
|
||||
}
|
||||
|
||||
protected static List<String> extractValuesAsStrings(BaseRuntimeChildDefinition theChildDefinition, IBase theValue) {
|
||||
return theChildDefinition
|
||||
.getAccessor()
|
||||
.getValues(theValue)
|
||||
.stream()
|
||||
.map(t -> (IPrimitiveType) t)
|
||||
.map(IPrimitiveType::getValueAsString)
|
||||
.filter(StringUtils::isNotBlank)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
protected static <T extends Enum<?>> String extractSystem(IBaseEnumeration<T> theBoundCode) {
|
||||
if (theBoundCode.getValue() != null) {
|
||||
return theBoundCode.getEnumFactory().toSystem(theBoundCode.getValue());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private class ResourceLinkExtractor implements IExtractor<PathAndRef> {
|
||||
|
||||
private PathAndRef myPathAndRef = null;
|
||||
|
@ -1313,7 +1420,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
}
|
||||
|
||||
private boolean isOrCanBeTreatedAsLocal(IIdType theId) {
|
||||
boolean acceptableAsLocalReference = !theId.isAbsolute() || myModelConfig.getTreatBaseUrlsAsLocal().contains(theId.getBaseUrl());
|
||||
boolean acceptableAsLocalReference = !theId.isAbsolute() || myModelConfig.getTreatBaseUrlsAsLocal().contains(theId.getBaseUrl());
|
||||
return acceptableAsLocalReference;
|
||||
}
|
||||
|
||||
|
@ -1459,7 +1566,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
IBaseEnumeration<?> obj = (IBaseEnumeration<?>) value;
|
||||
String system = extractSystem(obj);
|
||||
String code = obj.getValueAsString();
|
||||
BaseSearchParamExtractor.this.createTokenIndexIfNotBlank(myResourceTypeName, params, searchParam, system, code);
|
||||
BaseSearchParamExtractor.this.createTokenIndexIfNotBlankAndAdd(myResourceTypeName, params, searchParam, system, code);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -1473,7 +1580,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
system = boundCode.getBinder().toSystemString(valueAsEnum);
|
||||
}
|
||||
String code = boundCode.getValueAsString();
|
||||
BaseSearchParamExtractor.this.createTokenIndexIfNotBlank(myResourceTypeName, params, searchParam, system, code);
|
||||
BaseSearchParamExtractor.this.createTokenIndexIfNotBlankAndAdd(myResourceTypeName, params, searchParam, system, code);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -1491,7 +1598,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
valueAsString = ((IIdType) value).getIdPart();
|
||||
}
|
||||
|
||||
BaseSearchParamExtractor.this.createTokenIndexIfNotBlank(myResourceTypeName, params, searchParam, systemAsString, valueAsString);
|
||||
BaseSearchParamExtractor.this.createTokenIndexIfNotBlankAndAdd(myResourceTypeName, params, searchParam, systemAsString, valueAsString);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -1552,87 +1659,4 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
|||
}
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public static String[] splitPathsR4(@Nonnull String thePaths) {
|
||||
StringTokenizer tok = new StringTokenizer(thePaths, " |");
|
||||
tok.setTrimmerMatcher(new StringTrimmingTrimmerMatcher());
|
||||
return tok.getTokenArray();
|
||||
}
|
||||
|
||||
|
||||
public static boolean tokenTextIndexingEnabledForSearchParam(ModelConfig theModelConfig, RuntimeSearchParam theSearchParam) {
|
||||
Optional<Boolean> noSuppressForSearchParam = theSearchParam.getExtensions(HapiExtensions.EXT_SEARCHPARAM_TOKEN_SUPPRESS_TEXT_INDEXING).stream()
|
||||
.map(IBaseExtension::getValue)
|
||||
.map(val -> (IPrimitiveType<?>) val)
|
||||
.map(IPrimitiveType::getValueAsString)
|
||||
.map(Boolean::parseBoolean)
|
||||
.findFirst();
|
||||
|
||||
//if the SP doesn't care, use the system default.
|
||||
if (!noSuppressForSearchParam.isPresent()) {
|
||||
return !theModelConfig.isSuppressStringIndexingInTokens();
|
||||
//If the SP does care, use its value.
|
||||
} else {
|
||||
boolean suppressForSearchParam = noSuppressForSearchParam.get();
|
||||
ourLog.trace("Text indexing for SearchParameter {}: {}", theSearchParam.getName(), suppressForSearchParam);
|
||||
return !suppressForSearchParam;
|
||||
}
|
||||
}
|
||||
|
||||
private static void addIgnoredType(FhirContext theCtx, String theType, Set<String> theIgnoredTypes) {
|
||||
BaseRuntimeElementDefinition<?> elementDefinition = theCtx.getElementDefinition(theType);
|
||||
if (elementDefinition != null) {
|
||||
theIgnoredTypes.add(elementDefinition.getName());
|
||||
}
|
||||
}
|
||||
|
||||
private static String extractValueAsString(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
|
||||
return theChildDefinition
|
||||
.getAccessor()
|
||||
.<IPrimitiveType<?>>getFirstValueOrNull(theElement)
|
||||
.map(IPrimitiveType::getValueAsString)
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
private static Date extractValueAsDate(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
|
||||
return theChildDefinition
|
||||
.getAccessor()
|
||||
.<IPrimitiveType<Date>>getFirstValueOrNull(theElement)
|
||||
.map(IPrimitiveType::getValue)
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
private static BigDecimal extractValueAsBigDecimal(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
|
||||
return theChildDefinition
|
||||
.getAccessor()
|
||||
.<IPrimitiveType<BigDecimal>>getFirstValueOrNull(theElement)
|
||||
.map(IPrimitiveType::getValue)
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static List<IPrimitiveType<Date>> extractValuesAsFhirDates(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
|
||||
return (List) theChildDefinition
|
||||
.getAccessor()
|
||||
.getValues(theElement);
|
||||
}
|
||||
|
||||
private static List<String> extractValuesAsStrings(BaseRuntimeChildDefinition theChildDefinition, IBase theValue) {
|
||||
return theChildDefinition
|
||||
.getAccessor()
|
||||
.getValues(theValue)
|
||||
.stream()
|
||||
.map(t -> (IPrimitiveType) t)
|
||||
.map(IPrimitiveType::getValueAsString)
|
||||
.filter(StringUtils::isNotBlank)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
private static <T extends Enum<?>> String extractSystem(IBaseEnumeration<T> theBoundCode) {
|
||||
if (theBoundCode.getValue() != null) {
|
||||
return theBoundCode.getEnumFactory().toSystem(theBoundCode.getValue());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -22,17 +22,16 @@ package ca.uhn.fhir.jpa.searchparam.extractor;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.jpa.model.entity.*;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantityNormalized;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.utils.FHIRPathEngine;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
|
|
|
@ -147,15 +147,15 @@ public final class ResourceIndexedSearchParams {
|
|||
theEntity.setResourceLinks(myLinks);
|
||||
}
|
||||
|
||||
public void updateSpnamePrefixForIndexedOnContainedResource(String theSpnamePrefix) {
|
||||
updateSpnamePrefixForIndexedOnContainedResource(myNumberParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(myQuantityParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(myQuantityNormalizedParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(myDateParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(myUriParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(myTokenParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(myStringParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(myCoordsParams, theSpnamePrefix);
|
||||
public void updateSpnamePrefixForIndexedOnContainedResource(String theContainingType, String theSpnamePrefix) {
|
||||
updateSpnamePrefixForIndexedOnContainedResource(theContainingType, myNumberParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(theContainingType, myQuantityParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(theContainingType, myQuantityNormalizedParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(theContainingType, myDateParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(theContainingType, myUriParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(theContainingType, myTokenParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(theContainingType, myStringParams, theSpnamePrefix);
|
||||
updateSpnamePrefixForIndexedOnContainedResource(theContainingType, myCoordsParams, theSpnamePrefix);
|
||||
}
|
||||
|
||||
public void updateSpnamePrefixForLinksOnContainedResource(String theSpNamePrefix) {
|
||||
|
@ -190,11 +190,14 @@ public final class ResourceIndexedSearchParams {
|
|||
}
|
||||
}
|
||||
|
||||
private void updateSpnamePrefixForIndexedOnContainedResource(Collection<? extends BaseResourceIndexedSearchParam> theParams, @Nonnull String theSpnamePrefix) {
|
||||
private void updateSpnamePrefixForIndexedOnContainedResource(String theContainingType, Collection<? extends BaseResourceIndexedSearchParam> theParams, @Nonnull String theSpnamePrefix) {
|
||||
|
||||
for (BaseResourceIndexedSearchParam param : theParams) {
|
||||
param.setResourceType(theContainingType);
|
||||
param.setParamName(theSpnamePrefix + "." + param.getParamName());
|
||||
param.calculateHashes(); // re-calculuteHashes
|
||||
|
||||
// re-calculate hashes
|
||||
param.calculateHashes();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -43,16 +43,15 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
|
|||
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.parser.DataFormatException;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.util.FhirTerser;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.hl7.fhir.instance.model.api.IBaseReference;
|
||||
|
@ -176,7 +175,7 @@ public class SearchParamExtractorService {
|
|||
// 3.5 added reference name as a prefix for the contained resource if any
|
||||
// e.g. for Observation.subject contained reference
|
||||
// the SP_NAME = subject.family
|
||||
currParams.updateSpnamePrefixForIndexedOnContainedResource(spnamePrefix);
|
||||
currParams.updateSpnamePrefixForIndexedOnContainedResource(theEntity.getResourceType(), spnamePrefix);
|
||||
|
||||
// 3.6 merge to the mainParams
|
||||
// NOTE: the spname prefix is different
|
||||
|
@ -239,7 +238,8 @@ public class SearchParamExtractorService {
|
|||
|
||||
// Tokens (can result in both Token and String, as we index the display name for
|
||||
// the types: Coding, CodeableConcept)
|
||||
for (BaseResourceIndexedSearchParam next : extractSearchParamTokens(theResource)) {
|
||||
ISearchParamExtractor.SearchParamSet<BaseResourceIndexedSearchParam> tokens = extractSearchParamTokens(theResource);
|
||||
for (BaseResourceIndexedSearchParam next : tokens) {
|
||||
if (next instanceof ResourceIndexedSearchParamToken) {
|
||||
theParams.myTokenParams.add((ResourceIndexedSearchParamToken) next);
|
||||
} else if (next instanceof ResourceIndexedSearchParamCoords) {
|
||||
|
@ -250,7 +250,8 @@ public class SearchParamExtractorService {
|
|||
}
|
||||
|
||||
// Specials
|
||||
for (BaseResourceIndexedSearchParam next : extractSearchParamSpecial(theResource)) {
|
||||
ISearchParamExtractor.SearchParamSet<BaseResourceIndexedSearchParam> specials = extractSearchParamSpecial(theResource);
|
||||
for (BaseResourceIndexedSearchParam next : specials) {
|
||||
if (next instanceof ResourceIndexedSearchParamCoords) {
|
||||
theParams.myCoordsParams.add((ResourceIndexedSearchParamCoords) next);
|
||||
}
|
||||
|
@ -437,7 +438,7 @@ public class SearchParamExtractorService {
|
|||
// 2. Find referenced search parameters
|
||||
ISearchParamExtractor.SearchParamSet<PathAndRef> referencedSearchParamSet = mySearchParamExtractor.extractResourceLinks(theResource, true);
|
||||
|
||||
String spNamePrefix = null;
|
||||
String spNamePrefix;
|
||||
ResourceIndexedSearchParams currParams;
|
||||
// 3. for each referenced search parameter, create an index
|
||||
for (PathAndRef nextPathAndRef : referencedSearchParamSet) {
|
||||
|
@ -482,6 +483,7 @@ public class SearchParamExtractorService {
|
|||
}
|
||||
|
||||
private ResourceLink resolveTargetAndCreateResourceLinkOrReturnNull(@Nonnull RequestPartitionId theRequestPartitionId, ResourceTable theEntity, Date theUpdateTime, RuntimeSearchParam nextSpDef, String theNextPathsUnsplit, PathAndRef nextPathAndRef, IIdType theNextId, String theTypeString, Class<? extends IBaseResource> theType, IBaseReference theReference, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
|
||||
assert theRequestPartitionId != null;
|
||||
|
||||
ResourcePersistentId resolvedResourceId = theTransactionDetails.getResolvedResourceId(theNextId);
|
||||
if (resolvedResourceId != null) {
|
||||
|
|
|
@ -30,10 +30,24 @@ import java.util.Date;
|
|||
|
||||
public interface IJpaDao<T extends IBaseResource> {
|
||||
@SuppressWarnings("unchecked")
|
||||
IBasePersistedResource updateEntity(RequestDetails theRequest, IBaseResource theResource, IBasePersistedResource
|
||||
theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
|
||||
boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry);
|
||||
IBasePersistedResource updateEntity(
|
||||
RequestDetails theRequest,
|
||||
IBaseResource theResource,
|
||||
IBasePersistedResource theEntity,
|
||||
Date theDeletedTimestampOrNull,
|
||||
boolean thePerformIndexing,
|
||||
boolean theUpdateVersion,
|
||||
TransactionDetails theTransactionDetails,
|
||||
boolean theForceUpdate,
|
||||
boolean theCreateNewHistoryEntry);
|
||||
|
||||
IBasePersistedResource updateInternal(RequestDetails theRequestDetails, T theResource, boolean thePerformIndexing, boolean theForceUpdateVersion,
|
||||
IBasePersistedResource theEntity, IIdType theResourceId, IBaseResource theOldResource, TransactionDetails theTransactionDetails);
|
||||
IBasePersistedResource updateInternal(
|
||||
RequestDetails theRequestDetails,
|
||||
T theResource,
|
||||
boolean thePerformIndexing,
|
||||
boolean theForceUpdateVersion,
|
||||
IBasePersistedResource theEntity,
|
||||
IIdType theResourceId,
|
||||
IBaseResource theOldResource,
|
||||
TransactionDetails theTransactionDetails);
|
||||
}
|
||||
|
|
|
@ -259,7 +259,7 @@ public abstract class BaseStorageDao {
|
|||
* <p>
|
||||
* We only do this if thePerformIndexing is true because if it's false, that means
|
||||
* we're in a FHIR transaction during the first phase of write operation processing,
|
||||
* meaning that the versions of other resources may not have been updated yet. For example
|
||||
* meaning that the versions of other resources may not have been updated yet. For example
|
||||
* we're about to store an Observation with a reference to a Patient, and that Patient
|
||||
* is also being updated in the same transaction, during the first "no index" phase,
|
||||
* the Patient will not yet have its version number incremented, so it would be wrong
|
||||
|
|
|
@ -1,10 +1,17 @@
|
|||
package ca.uhn.fhir.rest.param;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class TokenParamTest {
|
||||
private static final FhirContext ourCtx = FhirContext.forR4Cached();
|
||||
|
||||
@Test
|
||||
public void testEquals() {
|
||||
TokenParam tokenParam1 = new TokenParam("foo", "bar");
|
||||
|
@ -32,4 +39,13 @@ public class TokenParamTest {
|
|||
assertEquals("", new TokenParam().getValueNotNull());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOfType() {
|
||||
TokenParam param = new TokenParam();
|
||||
param.setValueAsQueryToken(ourCtx, "identifier", Constants.PARAMQUALIFIER_TOKEN_OF_TYPE, "http://type-system|type-value|identifier-value");
|
||||
assertEquals(TokenParamModifier.OF_TYPE, param.getModifier());
|
||||
assertEquals("http://type-system", param.getSystem());
|
||||
assertEquals("type-value|identifier-value", param.getValue());
|
||||
}
|
||||
|
||||
}
|
Loading…
Reference in New Issue