Add inline tag mode (#2748)

* Start work on tag mode 3

* Add changelog

* Test fix
James Agnew 2021-06-22 23:08:09 -04:00 committed by GitHub
parent 912b44c3fa
commit 8379392e16
14 changed files with 628 additions and 217 deletions

View File

@@ -225,14 +225,23 @@ public class FhirContext {
}
+/**
+* @since 5.5.0
+*/
public static FhirContext forDstu3Cached() {
return forCached(FhirVersionEnum.DSTU3);
}
+/**
+* @since 5.5.0
+*/
public static FhirContext forR4Cached() {
return forCached(FhirVersionEnum.R4);
}
+/**
+* @since 5.5.0
+*/
public static FhirContext forR5Cached() {
return forCached(FhirVersionEnum.R5);
}
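These cached factory methods return a shared, process-wide FhirContext rather than building a new one on each call. A minimal usage sketch, assuming the R4 structures are on the classpath (the wrapper class here is illustrative, not part of this commit):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
import org.hl7.fhir.r4.model.Patient;

public class CachedContextExample {
	public static void main(String[] args) {
		// Returns a shared context for the R4 version; repeated calls reuse the same instance
		// instead of paying the cost of scanning the model classes again.
		FhirContext ctx = FhirContext.forR4Cached();
		IParser parser = ctx.newJsonParser().setPrettyPrint(true);
		System.out.println(parser.encodeResourceToString(new Patient()));
	}
}
```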

View File

@@ -27,7 +27,8 @@ import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
-* This annotation declares a bean as a subscription interceptor
+* This annotation declares a bean as a subscription interceptor. This interceptor
+* is not mandatory for interceptor classes, but is added as a marker by convention.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)

View File

@@ -0,0 +1,33 @@
package ca.uhn.hapi.fhir.docs.interceptor;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import org.hl7.fhir.instance.model.api.IBaseResource;
// START SNIPPET: TagTrimmingInterceptor
/**
* This is a simple interceptor for the JPA server that trims all tags, profiles, and security labels from
* resources before they are saved.
*/
@Interceptor
public class TagTrimmingInterceptor {
/** Handle creates */
@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED)
public void insert(IBaseResource theResource) {
theResource.getMeta().getTag().clear();
theResource.getMeta().getProfile().clear();
theResource.getMeta().getSecurity().clear();
}
/** Handle updates */
@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED)
public void update(IBaseResource theOldResource, IBaseResource theResource) {
theResource.getMeta().getTag().clear();
theResource.getMeta().getProfile().clear();
theResource.getMeta().getSecurity().clear();
}
}
// END SNIPPET: TagTrimmingInterceptor

View File

@@ -0,0 +1,7 @@
---
type: add
issue: 2748
title: "A new tag storage mode called **Inline Tag Mode** tas been added. In this mode, all tags are stored directly
in the serialized resource body in the database, instead of using dedicated tables. This has significant performance
advantages when storing resources with many distinct tags (i.e. many tags that are unique to each resource, as opposed
to being reused across multiple resources)."
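
For reference, the new mode is enabled through DaoConfig#setTagStorageMode, as the tests added later in this commit do. A minimal configuration sketch (the wrapper class name is illustrative; the table names are the existing tag tables referenced elsewhere in this commit):

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class InlineTagStorageConfig {
	public static DaoConfig daoConfig() {
		DaoConfig daoConfig = new DaoConfig();
		// Tags, profiles and security labels are kept in the serialized resource body,
		// so the dedicated tag tables (e.g. HFJ_RES_TAG, HFJ_TAG_DEF) are not written to.
		daoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);
		return daoConfig;
	}
}
```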

View File

@@ -0,0 +1,5 @@
---
type: fix
issue: 2748
title: "The SQL generated for the `_profile` search parameter did not use all of the columns on the tag index table,
resulting in poor performance on MySQL. This has been corrected."
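
For context, the affected path is an ordinary `_profile` search built with a SearchParameterMap, as in the tests touched below. A sketch (the profile URL and wrapper class are illustrative):

```java
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.UriParam;

public class ProfileSearchExample {
	public static SearchParameterMap profileSearch() {
		SearchParameterMap params = new SearchParameterMap();
		// Before this fix, the SQL generated for this parameter matched the tag type and code
		// columns but not the system column, so the full tag index could not be used.
		params.add("_profile", new UriParam("http://example.org/fhir/StructureDefinition/example-profile"));
		return params;
	}
}
```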

View File

@@ -6,3 +6,10 @@ In the case of a Plain Server, HAPI FHIR itself performs the role of the Server
In the case of a JPA Server, HAPI FHIR itself performs both roles. This means that **SERVER_xxx** Pointcuts may be intercepted by interceptors on any HAPI FHIR server. However, if you want to intercept **STORAGE_xxx** Pointcuts on a plain server, you will need to trigger them yourself.
# Example: Clearing Tags
The following example shows an interceptor that clears all tags, profiles, and security labels from a resource prior to storage in the JPA server.
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/interceptor/TagTrimmingInterceptor.java|TagTrimmingInterceptor}}
```
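
Once written, the interceptor still has to be registered with the server's interceptor service before the STORAGE_PRESTORAGE hooks above will fire. A minimal registration sketch (`myInterceptorService` is assumed to be the injected `IInterceptorService` bean):

```java
// Register the interceptor so its STORAGE_PRESTORAGE_RESOURCE_CREATED / _UPDATED hooks are invoked.
// "myInterceptorService" is assumed to be the injected IInterceptorService bean.
myInterceptorService.registerInterceptor(new TagTrimmingInterceptor());
```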

View File

@@ -2640,7 +2640,14 @@ public class DaoConfig {
/**
* A single set of tags is shared by all resource versions
*/
-NON_VERSIONED
+NON_VERSIONED,
+/**
+* Tags are stored directly in the resource body (in the {@link ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable}
+* entry for the resource), meaning that they are not indexed separately, and are versioned with the rest
+* of the resource.
+*/
+INLINE
}
}
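
Note that in INLINE mode the dedicated tag tables are not populated, so searches on `_tag`, `_profile`, and `_security` rely on ordinary token SearchParameters defined against `meta.tag`, `meta.profile`, and `meta.security`; the tests added later in this commit create exactly these. A sketch of such a SearchParameter for `_profile`, mirroring the test helper below (R4 shown; the wrapper class is illustrative):

```java
import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.SearchParameter;
import java.util.stream.Collectors;

public class InlineProfileSearchParam {
	public static SearchParameter create() {
		SearchParameter sp = new SearchParameter();
		sp.setId("SearchParameter/resource-profile");
		// Apply the parameter to every resource type known to the (cached) R4 context
		for (String next : FhirContext.forR4Cached().getResourceTypes().stream().sorted().collect(Collectors.toList())) {
			sp.addBase(next);
		}
		sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
		sp.setType(Enumerations.SearchParamType.TOKEN);
		sp.setCode("_profile");
		sp.setName("Profile");
		sp.setExpression("meta.profile");
		return sp;
	}
}
```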

View File

@@ -119,6 +119,7 @@ import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
@@ -128,6 +129,7 @@ import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
+import javax.validation.constraints.Null;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;
import java.util.ArrayList;
@@ -568,10 +570,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
}
-if (hasExtensions) {
+boolean inlineTagMode = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;
+if (hasExtensions || inlineTagMode) {
+if (!inlineTagMode) {
excludeElements.add(resourceType + ".meta.profile");
excludeElements.add(resourceType + ".meta.tag");
excludeElements.add(resourceType + ".meta.security");
+}
excludeElements.add(resourceType + ".meta.versionId");
excludeElements.add(resourceType + ".meta.lastUpdated");
excludeElements.add(resourceType + ".meta.source");
@@ -609,6 +615,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
boolean skipUpdatingTags = myConfig.isMassIngestionMode() && theEntity.isHasTags();
+skipUpdatingTags |= myConfig.getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;
if (!skipUpdatingTags) {
Set<ResourceTag> allDefs = new HashSet<>();
@@ -695,7 +702,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
@SuppressWarnings("unchecked")
-private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res, Long theVersion) {
+private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res, Long theVersion) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IResource) myContext.getResourceDefinition(theResourceType).newInstance();
@@ -724,12 +731,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
-Collection<? extends BaseTag> tags = theTagList;
+if (theTagList != null) {
if (theEntity.isHasTags()) {
TagList tagList = new TagList();
List<IBaseCoding> securityLabels = new ArrayList<>();
List<IdDt> profiles = new ArrayList<>();
-for (BaseTag next : tags) {
+for (BaseTag next : theTagList) {
switch (next.getTag().getTagType()) {
case PROFILE:
profiles.add(new IdDt(next.getTag().getCode()));
@@ -756,13 +763,13 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
ResourceMetadataKeyEnum.PROFILES.put(res, profiles);
}
}
+}
return retVal;
}
@SuppressWarnings("unchecked")
-private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res, Long theVersion) {
+private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res, Long theVersion) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IAnyResource) myContext.getResourceDefinition(theResourceType).newInstance();
@@ -784,9 +791,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
}
-res.getMeta().getTag().clear();
-res.getMeta().getProfile().clear();
-res.getMeta().getSecurity().clear();
res.getMeta().setLastUpdated(null);
res.getMeta().setVersionId(null);
@@ -796,6 +800,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
+if (theTagList != null) {
+res.getMeta().getTag().clear();
+res.getMeta().getProfile().clear();
+res.getMeta().getSecurity().clear();
for (BaseTag next : theTagList) {
switch (next.getTag().getTagType()) {
case PROFILE:
@@ -815,6 +823,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
break;
}
}
+}
return retVal;
}
@@ -913,6 +922,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// 1. get resource, it's encoding and the tags if any
byte[] resourceBytes;
ResourceEncodingEnum resourceEncoding;
+@Nullable
Collection<? extends BaseTag> tagList = Collections.emptyList();
long version;
String provenanceSourceUri = null;
@@ -922,14 +932,20 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
-if (getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED) {
+switch (getConfig().getTagStorageMode()) {
+case VERSIONED:
+default:
if (history.isHasTags()) {
tagList = history.getTags();
}
-} else {
+break;
+case NON_VERSIONED:
if (history.getResourceTable().isHasTags()) {
tagList = history.getResourceTable().getTags();
}
+break;
+case INLINE:
+tagList = null;
}
version = history.getVersion();
if (history.getProvenance() != null) {
@@ -952,11 +968,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
+switch (getConfig().getTagStorageMode()) {
+case VERSIONED:
+case NON_VERSIONED:
if (resource.isHasTags()) {
tagList = resource.getTags();
} else {
tagList = Collections.emptyList();
}
+break;
+case INLINE:
+tagList = null;
+break;
+}
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
@@ -970,10 +994,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
version = view.getVersion();
provenanceRequestId = view.getProvenanceRequestId();
provenanceSourceUri = view.getProvenanceSourceUri();
-if (theTagList == null)
-tagList = new HashSet<>();
-else
+switch (getConfig().getTagStorageMode()) {
+case VERSIONED:
+case NON_VERSIONED:
+if (theTagList != null) {
tagList = theTagList;
+} else {
+tagList = Collections.emptyList();
+}
+break;
+case INLINE:
+tagList = null;
+break;
+}
} else {
// something wrong
return null;
@@ -984,6 +1017,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// 3. Use the appropriate custom type if one is specified in the context
Class<R> resourceType = theResourceType;
+if (tagList != null) {
if (myContext.hasDefaultTypeForProfile()) {
for (BaseTag nextTag : tagList) {
if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
@@ -999,6 +1033,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
}
}
+}
// 4. parse the text to FHIR
R retVal;

View File

@@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.search.builder;
*/
import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
@@ -1079,13 +1078,24 @@ public class QueryStack {
case Constants.PARAM_TAG:
case Constants.PARAM_PROFILE:
case Constants.PARAM_SECURITY:
+if (myDaoConfig.getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE) {
+return createPredicateSearchParameter(theSourceJoinColumn, theResourceName, theParamName, theAndOrParams, theRequest, theRequestPartitionId, theSearchContainedMode);
+} else {
return createPredicateTag(theSourceJoinColumn, theAndOrParams, theParamName, theRequestPartitionId);
+}
case Constants.PARAM_SOURCE:
return createPredicateSourceForAndList(theSourceJoinColumn, theAndOrParams);
default:
+return createPredicateSearchParameter(theSourceJoinColumn, theResourceName, theParamName, theAndOrParams, theRequest, theRequestPartitionId, theSearchContainedMode);
+}
+}
+@Nullable
+private Condition createPredicateSearchParameter(@Nullable DbColumn theSourceJoinColumn, String theResourceName, String theParamName, List<List<IQueryParameterType>> theAndOrParams, RequestDetails theRequest, RequestPartitionId theRequestPartitionId, SearchContainedModeEnum theSearchContainedMode) {
List<Condition> andPredicates = new ArrayList<>();
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
if (nextParamDef != null) {
@@ -1201,9 +1211,6 @@ public class QueryStack {
}
return toAndPredicate(andPredicates);
-}
}
public void addPredicateCompositeUnique(String theIndexString, RequestPartitionId theRequestPartitionId) {

View File

@@ -21,24 +21,23 @@ package ca.uhn.fhir.jpa.search.builder.predicate;
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import com.google.common.collect.Lists;
import com.healthmarketscience.sqlbuilder.BinaryCondition;
import com.healthmarketscience.sqlbuilder.ComboCondition;
import com.healthmarketscience.sqlbuilder.Condition;
import com.healthmarketscience.sqlbuilder.UnaryCondition;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbTable;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class TagPredicateBuilder extends BaseJoiningPredicateBuilder {
private static final Logger ourLog = LoggerFactory.getLogger(TagPredicateBuilder.class);
private final DbColumn myColumnResId;
private final DbTable myTagDefinitionTable;
@@ -72,11 +71,20 @@ public class TagPredicateBuilder extends BaseJoiningPredicateBuilder {
List<Condition> orPredicates = Lists.newArrayList();
for (Pair<String, String> next : theTokens) {
-Condition codePredicate = BinaryCondition.equalTo(myTagDefinitionColumnTagCode, generatePlaceholder(next.getRight()));
-if (isNotBlank(next.getLeft())) {
-Condition systemPredicate = BinaryCondition.equalTo(myTagDefinitionColumnTagSystem, generatePlaceholder(next.getLeft()));
+String system = next.getLeft();
+String code = next.getRight();
+if (theTagType == TagTypeEnum.PROFILE) {
+system = BaseHapiFhirDao.NS_JPA_PROFILE;
+}
+Condition codePredicate = BinaryCondition.equalTo(myTagDefinitionColumnTagCode, generatePlaceholder(code));
+if (isNotBlank(system)) {
+Condition systemPredicate = BinaryCondition.equalTo(myTagDefinitionColumnTagSystem, generatePlaceholder(system));
orPredicates.add(ComboCondition.and(typePredicate, systemPredicate, codePredicate));
} else {
+// Note: We don't have an index for this combo, which means that this may not perform
+// well on MySQL (and maybe others) without an added index
orPredicates.add(ComboCondition.and(typePredicate, codePredicate));
}
}

View File

@@ -2298,7 +2298,11 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
{
SearchParameterMap params = new SearchParameterMap();
params.add("_profile", new UriParam("http://" + methodName));
logAllTokenIndexes();
myCaptureQueriesListener.clear();
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
myCaptureQueriesListener.logSelectQueries();
assertThat(patients, containsInAnyOrder(tag2id));
}
}

View File

@@ -1,15 +1,35 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.SearchParameter;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.UUID;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4SearchSqlTest.class);
@AfterEach
public void after() {
myDaoConfig.setTagStorageMode(DaoConfig.DEFAULT_TAG_STORAGE_MODE);
}
/**
* One regular search params - Doesn't need HFJ_RESOURCE as root
*/
@@ -43,5 +63,68 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
}
@Test
public void testSearchByProfile_VersionedMode() {
// Put a tag in so we can search for it
String code = "http://" + UUID.randomUUID();
Patient p = new Patient();
p.getMeta().addProfile(code);
IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
myMemoryCacheService.invalidateAllCaches();
// Search
myCaptureQueriesListener.clear();
SearchParameterMap map = SearchParameterMap.newSynchronous()
.add(Constants.PARAM_PROFILE, new TokenParam(code));
IBundleProvider outcome = myPatientDao.search(map);
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
// Query 1 - Find resources: Make sure we search for tag type+system+code always
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) LEFT OUTER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?)))", sql);
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql, containsString("where resourcese0_.RES_ID in (?)"));
// Query 3 - Load tags and definitions
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(false, false);
assertThat(sql, containsString("from HFJ_RES_TAG resourceta0_ inner join HFJ_TAG_DEF"));
assertThat(toUnqualifiedVersionlessIds(outcome), Matchers.contains(id));
}
@Test
public void testSearchByProfile_InlineMode() {
myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);
SearchParameter searchParameter = FhirResourceDaoR4TagsTest.createSearchParamForInlineResourceProfile();
ourLog.info("SearchParam:\n{}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(searchParameter));
mySearchParameterDao.update(searchParameter, mySrd);
mySearchParamRegistry.forceRefresh();
// Put a tag in so we can search for it
String code = "http://" + UUID.randomUUID();
Patient p = new Patient();
p.getMeta().addProfile(code);
IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
myMemoryCacheService.invalidateAllCaches();
// Search
myCaptureQueriesListener.clear();
SearchParameterMap map = SearchParameterMap.newSynchronous()
.add(Constants.PARAM_PROFILE, new TokenParam(code));
IBundleProvider outcome = myPatientDao.search(map);
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
// Query 1 - Find resources: Just a standard token search in this mode
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)", sql);
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql, containsString("where resourcese0_.RES_ID in (?)"));
assertThat(toUnqualifiedVersionlessIds(outcome), Matchers.contains(id));
}
}

View File

@@ -1,10 +1,15 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.SearchParameter;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
@@ -142,6 +147,175 @@ public class FhirResourceDaoR4TagsTest extends BaseJpaR4Test {
}
@Test
public void testInlineTags_StoreAndRetrieve() {
myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);
// Store a first version
Patient patient = new Patient();
patient.setId("Patient/A");
patient.getMeta().addProfile("http://profile1");
patient.getMeta().addTag("http://tag1", "vtag1", "dtag1");
patient.getMeta().addSecurity("http://sec1", "vsec1", "dsec1");
patient.setActive(true);
myPatientDao.update(patient, mySrd);
runInTransaction(()->{
assertEquals(0, myResourceTagDao.count());
assertEquals(0, myResourceHistoryTagDao.count());
assertEquals(0, myTagDefinitionDao.count());
});
// Read it back
patient = myPatientDao.read(new IdType("Patient/A/_history/1"), mySrd);
assertThat(toProfiles(patient).toString(), toProfiles(patient), contains("http://profile1"));
assertThat(toTags(patient).toString(), toTags(patient), contains("http://tag1|vtag1|dtag1"));
assertThat(toSecurityLabels(patient).toString(), toSecurityLabels(patient), contains("http://sec1|vsec1|dsec1"));
// Store a second version
patient = new Patient();
patient.setId("Patient/A");
patient.getMeta().addProfile("http://profile2");
patient.getMeta().addTag("http://tag2", "vtag2", "dtag2");
patient.getMeta().addSecurity("http://sec2", "vsec2", "dsec2");
patient.setActive(true);
myPatientDao.update(patient, mySrd);
runInTransaction(()->{
assertEquals(0, myResourceTagDao.count());
assertEquals(0, myResourceHistoryTagDao.count());
assertEquals(0, myTagDefinitionDao.count());
});
// First version should have only the initial tags
patient = myPatientDao.read(new IdType("Patient/A/_history/1"), mySrd);
assertThat(toProfiles(patient).toString(), toProfiles(patient), contains("http://profile1"));
assertThat(toTags(patient).toString(), toTags(patient), contains("http://tag1|vtag1|dtag1"));
assertThat(toSecurityLabels(patient).toString(), toSecurityLabels(patient), contains("http://sec1|vsec1|dsec1"));
// Second version should have the new set of tags
// TODO: We could copy these forward like we do for non-inline mode. Perhaps in the future.
patient = myPatientDao.read(new IdType("Patient/A/_history/2"), mySrd);
assertThat(toProfiles(patient).toString(), toProfiles(patient), contains("http://profile2"));
assertThat(toTags(patient).toString(), toTags(patient), containsInAnyOrder("http://tag2|vtag2|dtag2"));
assertThat(toSecurityLabels(patient).toString(), toSecurityLabels(patient), containsInAnyOrder("http://sec2|vsec2|dsec2"));
}
@Test
public void testInlineTags_Search_Tag() {
myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);
SearchParameter searchParameter = createResourceTagSearchParameter();
ourLog.info("SearchParam:\n{}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(searchParameter));
mySearchParameterDao.update(searchParameter, mySrd);
mySearchParamRegistry.forceRefresh();
createPatientsForInlineSearchTests();
logAllTokenIndexes();
// Perform a search
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_tag", new TokenParam("http://tag1", "vtag1"));
IBundleProvider outcome = myPatientDao.search(map, mySrd);
assertThat(toUnqualifiedVersionlessIdValues(outcome), containsInAnyOrder("Patient/A", "Patient/B"));
validatePatientSearchResultsForInlineTags(outcome);
}
@Test
public void testInlineTags_Search_Profile() {
myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);
SearchParameter searchParameter = createSearchParamForInlineResourceProfile();
ourLog.info("SearchParam:\n{}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(searchParameter));
mySearchParameterDao.update(searchParameter, mySrd);
mySearchParamRegistry.forceRefresh();
createPatientsForInlineSearchTests();
logAllTokenIndexes();
// Perform a search
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_profile", new TokenParam("http://profile1"));
IBundleProvider outcome = myPatientDao.search(map, mySrd);
assertThat(toUnqualifiedVersionlessIdValues(outcome), containsInAnyOrder("Patient/A", "Patient/B"));
validatePatientSearchResultsForInlineTags(outcome);
}
@Test
public void testInlineTags_Search_Security() {
myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);
SearchParameter searchParameter = new SearchParameter();
searchParameter.setId("SearchParameter/resource-security");
for (String next : myFhirCtx.getResourceTypes().stream().sorted().collect(Collectors.toList())) {
searchParameter.addBase(next);
}
searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
searchParameter.setType(Enumerations.SearchParamType.TOKEN);
searchParameter.setCode("_security");
searchParameter.setName("Security");
searchParameter.setExpression("meta.security");
ourLog.info("SearchParam:\n{}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(searchParameter));
mySearchParameterDao.update(searchParameter, mySrd);
mySearchParamRegistry.forceRefresh();
createPatientsForInlineSearchTests();
logAllTokenIndexes();
// Perform a search
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_security", new TokenParam("http://sec1", "vsec1"));
IBundleProvider outcome = myPatientDao.search(map, mySrd);
assertThat(toUnqualifiedVersionlessIdValues(outcome), containsInAnyOrder("Patient/A", "Patient/B"));
validatePatientSearchResultsForInlineTags(outcome);
}
private void validatePatientSearchResultsForInlineTags(IBundleProvider outcome) {
Patient patient;
patient = (Patient) outcome.getResources(0, 1).get(0);
assertThat(toProfiles(patient).toString(), toProfiles(patient), contains("http://profile1"));
assertThat(toTags(patient).toString(), toTags(patient), contains("http://tag1|vtag1|dtag1"));
assertThat(toSecurityLabels(patient).toString(), toSecurityLabels(patient), contains("http://sec1|vsec1|dsec1"));
patient = (Patient) outcome.getResources(1, 2).get(0);
assertThat(toProfiles(patient).toString(), toProfiles(patient), contains("http://profile1"));
assertThat(toTags(patient).toString(), toTags(patient), contains("http://tag1|vtag1|dtag1"));
assertThat(toSecurityLabels(patient).toString(), toSecurityLabels(patient), contains("http://sec1|vsec1|dsec1"));
}
private void createPatientsForInlineSearchTests() {
Patient patient = new Patient();
patient.setId("Patient/A");
patient.getMeta().addProfile("http://profile1");
patient.getMeta().addTag("http://tag1", "vtag1", "dtag1");
patient.getMeta().addSecurity("http://sec1", "vsec1", "dsec1");
patient.setActive(true);
myPatientDao.update(patient, mySrd);
patient = new Patient();
patient.setId("Patient/B");
patient.getMeta().addProfile("http://profile1");
patient.getMeta().addTag("http://tag1", "vtag1", "dtag1");
patient.getMeta().addSecurity("http://sec1", "vsec1", "dsec1");
patient.setActive(true);
myPatientDao.update(patient, mySrd);
patient = new Patient();
patient.setId("Patient/NO");
patient.getMeta().addProfile("http://profile99");
patient.getMeta().addTag("http://tag99", "vtag99", "dtag99");
patient.getMeta().addSecurity("http://sec99", "vsec99", "dsec99");
patient.setActive(true);
myPatientDao.update(patient, mySrd);
}
private void initializeNonVersioned() {
myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.NON_VERSIONED);
@@ -183,9 +357,44 @@ public class FhirResourceDaoR4TagsTest extends BaseJpaR4Test {
return patient.getMeta().getTag().stream().map(t -> t.getSystem() + "|" + t.getCode() + "|" + t.getDisplay()).collect(Collectors.toList());
}
@Nonnull
private List<String> toSecurityLabels(Patient patient) {
return patient.getMeta().getSecurity().stream().map(t -> t.getSystem() + "|" + t.getCode() + "|" + t.getDisplay()).collect(Collectors.toList());
}
@Nonnull
private List<String> toProfiles(Patient patient) {
return patient.getMeta().getProfile().stream().map(t -> t.getValue()).collect(Collectors.toList());
}
@Nonnull
public static SearchParameter createSearchParamForInlineResourceProfile() {
SearchParameter searchParameter = new SearchParameter();
searchParameter.setId("SearchParameter/resource-profile");
for (String next : FhirContext.forR4Cached().getResourceTypes().stream().sorted().collect(Collectors.toList())) {
searchParameter.addBase(next);
}
searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
searchParameter.setType(Enumerations.SearchParamType.TOKEN);
searchParameter.setCode("_profile");
searchParameter.setName("Profile");
searchParameter.setExpression("meta.profile");
return searchParameter;
}
@Nonnull
public static SearchParameter createResourceTagSearchParameter() {
SearchParameter searchParameter = new SearchParameter();
searchParameter.setId("SearchParameter/resource-tag");
for (String next : FhirContext.forR4Cached().getResourceTypes().stream().sorted().collect(Collectors.toList())) {
searchParameter.addBase(next);
}
searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
searchParameter.setType(Enumerations.SearchParamType.TOKEN);
searchParameter.setCode("_tag");
searchParameter.setName("Tag");
searchParameter.setExpression("meta.tag");
return searchParameter;
}
}

View File

@@ -48,36 +48,32 @@ import java.util.Collection;
public class TagDefinition implements Serializable {
private static final long serialVersionUID = 1L;
@Column(name = "TAG_CODE", length = 200)
private String myCode;
@Column(name = "TAG_DISPLAY", length = 200)
private String myDisplay;
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_TAGDEF_ID")
@SequenceGenerator(name = "SEQ_TAGDEF_ID", sequenceName = "SEQ_TAGDEF_ID")
@Column(name = "TAG_ID")
private Long myId;
@OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myTag")
private Collection<ResourceTag> myResources;
@OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myTag")
private Collection<ResourceHistoryTag> myResourceVersions;
@Column(name = "TAG_SYSTEM", length = 200)
private String mySystem;
@Column(name = "TAG_TYPE", nullable = false)
@Enumerated(EnumType.ORDINAL)
private TagTypeEnum myTagType;
@Transient
private transient Integer myHashCode;
-/**
- * Constructor
- */
public TagDefinition() {
-super();
}
public TagDefinition(TagTypeEnum theTagType, String theSystem, String theCode, String theDisplay) {