Add inline tag mode (#2748)

* Start work on tag mode 3

* Add changelog

* Test fix
This commit is contained in:
James Agnew 2021-06-22 23:08:09 -04:00 committed by GitHub
parent 912b44c3fa
commit 8379392e16
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 628 additions and 217 deletions

View File

@ -225,14 +225,23 @@ public class FhirContext {
}
/**
 * Returns a {@link FhirContext} for FHIR version DSTU3, reusing a previously
 * created instance where possible (delegates to {@code forCached}) rather than
 * constructing a new context each time.
 *
 * @since 5.5.0
 */
public static FhirContext forDstu3Cached() {
return forCached(FhirVersionEnum.DSTU3);
}
/**
 * Returns a {@link FhirContext} for FHIR version R4, reusing a previously
 * created instance where possible (delegates to {@code forCached}) rather than
 * constructing a new context each time.
 *
 * @since 5.5.0
 */
public static FhirContext forR4Cached() {
return forCached(FhirVersionEnum.R4);
}
/**
 * Returns a {@link FhirContext} for FHIR version R5, reusing a previously
 * created instance where possible (delegates to {@code forCached}) rather than
 * constructing a new context each time.
 *
 * @since 5.5.0
 */
public static FhirContext forR5Cached() {
return forCached(FhirVersionEnum.R5);
}

View File

@ -27,7 +27,8 @@ import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* This annotation declares a bean as a subscription interceptor
* This annotation declares a bean as a subscription interceptor. This interceptor
* is not mandatory for interceptor classes, but is added as a marker by convention.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)

View File

@ -0,0 +1,33 @@
package ca.uhn.hapi.fhir.docs.interceptor;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import org.hl7.fhir.instance.model.api.IBaseResource;
// START SNIPPET: TagTrimmingInterceptor
/**
 * A simple JPA server interceptor that strips all tags, profiles, and security labels
 * from a resource before it is persisted.
 */
@Interceptor
public class TagTrimmingInterceptor {

	/** Invoked before a resource create is stored */
	@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED)
	public void insert(IBaseResource theResource) {
		removeAllMetaTags(theResource);
	}

	/** Invoked before a resource update is stored */
	@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED)
	public void update(IBaseResource theOldResource, IBaseResource theResource) {
		removeAllMetaTags(theResource);
	}

	/** Clears the tag, profile, and security-label lists on the resource meta */
	private void removeAllMetaTags(IBaseResource theResource) {
		theResource.getMeta().getTag().clear();
		theResource.getMeta().getProfile().clear();
		theResource.getMeta().getSecurity().clear();
	}
}
// END SNIPPET: TagTrimmingInterceptor

View File

@ -0,0 +1,7 @@
---
type: add
issue: 2748
title: "A new tag storage mode called **Inline Tag Mode** has been added. In this mode, all tags are stored directly
in the serialized resource body in the database, instead of using dedicated tables. This has significant performance
advantages when storing resources with many distinct tags (i.e. many tags that are unique to each resource, as opposed
to being reused across multiple resources)."

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 2748
title: "The SQL generated for the `_profile` search parameter did not use all of the columns on the tag index table,
resulting in poor performance on MySQL. This has been corrected."

View File

@ -6,3 +6,10 @@ In the case of a Plain Server, HAPI FHIR itself performs the role of the Server
In the case of a JPA Server, HAPI FHIR itself performs both roles. This means that **SERVER_xxx** Pointcuts may be intercepted by interceptors on any HAPI FHIR server. However, if you want to intercept **STORAGE_xxx** Pointcuts on a plain server, you will need to trigger them yourself.
# Example: Clearing Tags
The following example shows an interceptor that clears all tags, profiles, and security labels from a resource prior to storage in the JPA server.
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/interceptor/TagTrimmingInterceptor.java|TagTrimmingInterceptor}}
```

View File

@ -2640,7 +2640,14 @@ public class DaoConfig {
/**
* A single set of tags is shared by all resource versions
*/
NON_VERSIONED
NON_VERSIONED,
/**
* Tags are stored directly in the resource body (in the {@link ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable}
* entry for the resource, meaning that they are not indexed separately, and are versioned with the rest
* of the resource.
*/
INLINE
}
}

View File

@ -119,6 +119,7 @@ import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
@ -128,6 +129,7 @@ import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import javax.validation.constraints.Null;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;
import java.util.ArrayList;
@ -568,10 +570,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
}
if (hasExtensions) {
excludeElements.add(resourceType + ".meta.profile");
excludeElements.add(resourceType + ".meta.tag");
excludeElements.add(resourceType + ".meta.security");
boolean inlineTagMode = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;
if (hasExtensions || inlineTagMode) {
if (!inlineTagMode) {
excludeElements.add(resourceType + ".meta.profile");
excludeElements.add(resourceType + ".meta.tag");
excludeElements.add(resourceType + ".meta.security");
}
excludeElements.add(resourceType + ".meta.versionId");
excludeElements.add(resourceType + ".meta.lastUpdated");
excludeElements.add(resourceType + ".meta.source");
@ -609,6 +615,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
boolean skipUpdatingTags = myConfig.isMassIngestionMode() && theEntity.isHasTags();
skipUpdatingTags |= myConfig.getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;
if (!skipUpdatingTags) {
Set<ResourceTag> allDefs = new HashSet<>();
@ -695,7 +702,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res, Long theVersion) {
private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res, Long theVersion) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IResource) myContext.getResourceDefinition(theResourceType).newInstance();
@ -724,45 +731,45 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
Collection<? extends BaseTag> tags = theTagList;
if (theEntity.isHasTags()) {
TagList tagList = new TagList();
List<IBaseCoding> securityLabels = new ArrayList<>();
List<IdDt> profiles = new ArrayList<>();
for (BaseTag next : tags) {
switch (next.getTag().getTagType()) {
case PROFILE:
profiles.add(new IdDt(next.getTag().getCode()));
break;
case SECURITY_LABEL:
IBaseCoding secLabel = (IBaseCoding) myContext.getVersion().newCodingDt();
secLabel.setSystem(next.getTag().getSystem());
secLabel.setCode(next.getTag().getCode());
secLabel.setDisplay(next.getTag().getDisplay());
securityLabels.add(secLabel);
break;
case TAG:
tagList.add(new Tag(next.getTag().getSystem(), next.getTag().getCode(), next.getTag().getDisplay()));
break;
if (theTagList != null) {
if (theEntity.isHasTags()) {
TagList tagList = new TagList();
List<IBaseCoding> securityLabels = new ArrayList<>();
List<IdDt> profiles = new ArrayList<>();
for (BaseTag next : theTagList) {
switch (next.getTag().getTagType()) {
case PROFILE:
profiles.add(new IdDt(next.getTag().getCode()));
break;
case SECURITY_LABEL:
IBaseCoding secLabel = (IBaseCoding) myContext.getVersion().newCodingDt();
secLabel.setSystem(next.getTag().getSystem());
secLabel.setCode(next.getTag().getCode());
secLabel.setDisplay(next.getTag().getDisplay());
securityLabels.add(secLabel);
break;
case TAG:
tagList.add(new Tag(next.getTag().getSystem(), next.getTag().getCode(), next.getTag().getDisplay()));
break;
}
}
if (tagList.size() > 0) {
ResourceMetadataKeyEnum.TAG_LIST.put(res, tagList);
}
if (securityLabels.size() > 0) {
ResourceMetadataKeyEnum.SECURITY_LABELS.put(res, toBaseCodingList(securityLabels));
}
if (profiles.size() > 0) {
ResourceMetadataKeyEnum.PROFILES.put(res, profiles);
}
}
if (tagList.size() > 0) {
ResourceMetadataKeyEnum.TAG_LIST.put(res, tagList);
}
if (securityLabels.size() > 0) {
ResourceMetadataKeyEnum.SECURITY_LABELS.put(res, toBaseCodingList(securityLabels));
}
if (profiles.size() > 0) {
ResourceMetadataKeyEnum.PROFILES.put(res, profiles);
}
}
return retVal;
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res, Long theVersion) {
private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, @Nullable Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res, Long theVersion) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IAnyResource) myContext.getResourceDefinition(theResourceType).newInstance();
@ -784,9 +791,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
}
res.getMeta().getTag().clear();
res.getMeta().getProfile().clear();
res.getMeta().getSecurity().clear();
res.getMeta().setLastUpdated(null);
res.getMeta().setVersionId(null);
@ -796,23 +800,28 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
for (BaseTag next : theTagList) {
switch (next.getTag().getTagType()) {
case PROFILE:
res.getMeta().addProfile(next.getTag().getCode());
break;
case SECURITY_LABEL:
IBaseCoding sec = res.getMeta().addSecurity();
sec.setSystem(next.getTag().getSystem());
sec.setCode(next.getTag().getCode());
sec.setDisplay(next.getTag().getDisplay());
break;
case TAG:
IBaseCoding tag = res.getMeta().addTag();
tag.setSystem(next.getTag().getSystem());
tag.setCode(next.getTag().getCode());
tag.setDisplay(next.getTag().getDisplay());
break;
if (theTagList != null) {
res.getMeta().getTag().clear();
res.getMeta().getProfile().clear();
res.getMeta().getSecurity().clear();
for (BaseTag next : theTagList) {
switch (next.getTag().getTagType()) {
case PROFILE:
res.getMeta().addProfile(next.getTag().getCode());
break;
case SECURITY_LABEL:
IBaseCoding sec = res.getMeta().addSecurity();
sec.setSystem(next.getTag().getSystem());
sec.setCode(next.getTag().getCode());
sec.setDisplay(next.getTag().getDisplay());
break;
case TAG:
IBaseCoding tag = res.getMeta().addTag();
tag.setSystem(next.getTag().getSystem());
tag.setCode(next.getTag().getCode());
tag.setDisplay(next.getTag().getDisplay());
break;
}
}
}
@ -913,6 +922,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// 1. get resource, it's encoding and the tags if any
byte[] resourceBytes;
ResourceEncodingEnum resourceEncoding;
@Nullable
Collection<? extends BaseTag> tagList = Collections.emptyList();
long version;
String provenanceSourceUri = null;
@ -922,14 +932,20 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
if (getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED) {
if (history.isHasTags()) {
tagList = history.getTags();
}
} else {
if (history.getResourceTable().isHasTags()) {
tagList = history.getResourceTable().getTags();
}
switch (getConfig().getTagStorageMode()) {
case VERSIONED:
default:
if (history.isHasTags()) {
tagList = history.getTags();
}
break;
case NON_VERSIONED:
if (history.getResourceTable().isHasTags()) {
tagList = history.getResourceTable().getTags();
}
break;
case INLINE:
tagList = null;
}
version = history.getVersion();
if (history.getProvenance() != null) {
@ -952,10 +968,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
if (resource.isHasTags()) {
tagList = resource.getTags();
} else {
tagList = Collections.emptyList();
switch (getConfig().getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (resource.isHasTags()) {
tagList = resource.getTags();
} else {
tagList = Collections.emptyList();
}
break;
case INLINE:
tagList = null;
break;
}
version = history.getVersion();
if (history.getProvenance() != null) {
@ -970,10 +994,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
version = view.getVersion();
provenanceRequestId = view.getProvenanceRequestId();
provenanceSourceUri = view.getProvenanceSourceUri();
if (theTagList == null)
tagList = new HashSet<>();
else
tagList = theTagList;
switch (getConfig().getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (theTagList != null) {
tagList = theTagList;
} else {
tagList = Collections.emptyList();
}
break;
case INLINE:
tagList = null;
break;
}
} else {
// something wrong
return null;
@ -984,16 +1017,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// 3. Use the appropriate custom type if one is specified in the context
Class<R> resourceType = theResourceType;
if (myContext.hasDefaultTypeForProfile()) {
for (BaseTag nextTag : tagList) {
if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
String profile = nextTag.getTag().getCode();
if (isNotBlank(profile)) {
Class<? extends IBaseResource> newType = myContext.getDefaultTypeForProfile(profile);
if (newType != null && theResourceType.isAssignableFrom(newType)) {
ourLog.debug("Using custom type {} for profile: {}", newType.getName(), profile);
resourceType = (Class<R>) newType;
break;
if (tagList != null) {
if (myContext.hasDefaultTypeForProfile()) {
for (BaseTag nextTag : tagList) {
if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
String profile = nextTag.getTag().getCode();
if (isNotBlank(profile)) {
Class<? extends IBaseResource> newType = myContext.getDefaultTypeForProfile(profile);
if (newType != null && theResourceType.isAssignableFrom(newType)) {
ourLog.debug("Using custom type {} for profile: {}", newType.getName(), profile);
resourceType = (Class<R>) newType;
break;
}
}
}
}

View File

@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.search.builder;
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
@ -1079,133 +1078,141 @@ public class QueryStack {
case Constants.PARAM_TAG:
case Constants.PARAM_PROFILE:
case Constants.PARAM_SECURITY:
return createPredicateTag(theSourceJoinColumn, theAndOrParams, theParamName, theRequestPartitionId);
if (myDaoConfig.getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE) {
return createPredicateSearchParameter(theSourceJoinColumn, theResourceName, theParamName, theAndOrParams, theRequest, theRequestPartitionId, theSearchContainedMode);
} else {
return createPredicateTag(theSourceJoinColumn, theAndOrParams, theParamName, theRequestPartitionId);
}
case Constants.PARAM_SOURCE:
return createPredicateSourceForAndList(theSourceJoinColumn, theAndOrParams);
default:
List<Condition> andPredicates = new ArrayList<>();
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
if (nextParamDef != null) {
if (myPartitionSettings.isPartitioningEnabled() && myPartitionSettings.isIncludePartitionInSearchHashes()) {
if (theRequestPartitionId.isAllPartitions()) {
throw new PreconditionFailedException("This server is not configured to support search against all partitions");
}
}
switch (nextParamDef.getParamType()) {
case DATE:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
// FT: 2021-01-18 use operation 'gt', 'ge', 'le' or 'lt'
// to create the predicateDate instead of generic one with operation = null
SearchFilterParser.CompareOperation operation = null;
if (nextAnd.size() > 0) {
DateParam param = (DateParam) nextAnd.get(0);
operation = toOperation(param.getPrefix());
}
andPredicates.add(createPredicateDate(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, operation, theRequestPartitionId));
//andPredicates.add(createPredicateDate(theSourceJoinColumn, theResourceName, nextParamDef, nextAnd, null, theRequestPartitionId));
}
break;
case QUANTITY:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
SearchFilterParser.CompareOperation operation = null;
if (nextAnd.size() > 0) {
QuantityParam param = (QuantityParam) nextAnd.get(0);
operation = toOperation(param.getPrefix());
}
andPredicates.add(createPredicateQuantity(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, operation, theRequestPartitionId));
}
break;
case REFERENCE:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
if (theSearchContainedMode.equals(SearchContainedModeEnum.TRUE))
andPredicates.add(createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId));
else
andPredicates.add(createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId));
}
break;
case STRING:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
andPredicates.add(createPredicateString(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, SearchFilterParser.CompareOperation.sw, theRequestPartitionId));
}
break;
case TOKEN:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
if ("Location.position".equals(nextParamDef.getPath())) {
andPredicates.add(createPredicateCoords(theSourceJoinColumn, theResourceName, nextParamDef, nextAnd, theRequestPartitionId));
} else {
andPredicates.add(createPredicateToken(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, null, theRequestPartitionId));
}
}
break;
case NUMBER:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
andPredicates.add(createPredicateNumber(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, null, theRequestPartitionId));
}
break;
case COMPOSITE:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
andPredicates.add(createPredicateComposite(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, theRequestPartitionId));
}
break;
case URI:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
andPredicates.add(createPredicateUri(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, SearchFilterParser.CompareOperation.eq, theRequest, theRequestPartitionId));
}
break;
case HAS:
case SPECIAL:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
if ("Location.position".equals(nextParamDef.getPath())) {
andPredicates.add(createPredicateCoords(theSourceJoinColumn, theResourceName, nextParamDef, nextAnd, theRequestPartitionId));
}
}
break;
}
} else {
// These are handled later
if (!Constants.PARAM_CONTENT.equals(theParamName) && !Constants.PARAM_TEXT.equals(theParamName)) {
if (Constants.PARAM_FILTER.equals(theParamName)) {
// Parse the predicates enumerated in the _filter separated by AND or OR...
if (theAndOrParams.get(0).get(0) instanceof StringParam) {
String filterString = ((StringParam) theAndOrParams.get(0).get(0)).getValue();
SearchFilterParser.Filter filter;
try {
filter = SearchFilterParser.parse(filterString);
} catch (SearchFilterParser.FilterSyntaxException theE) {
throw new InvalidRequestException("Error parsing _filter syntax: " + theE.getMessage());
}
if (filter != null) {
if (!myDaoConfig.isFilterParameterEnabled()) {
throw new InvalidRequestException(Constants.PARAM_FILTER + " parameter is disabled on this server");
}
Condition predicate = createPredicateFilter(this, filter, theResourceName, theRequest, theRequestPartitionId);
if (predicate != null) {
mySqlBuilder.addPredicate(predicate);
}
}
}
} else {
String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", theParamName, theResourceName, mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName));
throw new InvalidRequestException(msg);
}
}
}
return toAndPredicate(andPredicates);
return createPredicateSearchParameter(theSourceJoinColumn, theResourceName, theParamName, theAndOrParams, theRequest, theRequestPartitionId, theSearchContainedMode);
}
}
@Nullable
private Condition createPredicateSearchParameter(@Nullable DbColumn theSourceJoinColumn, String theResourceName, String theParamName, List<List<IQueryParameterType>> theAndOrParams, RequestDetails theRequest, RequestPartitionId theRequestPartitionId, SearchContainedModeEnum theSearchContainedMode) {
List<Condition> andPredicates = new ArrayList<>();
RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
if (nextParamDef != null) {
if (myPartitionSettings.isPartitioningEnabled() && myPartitionSettings.isIncludePartitionInSearchHashes()) {
if (theRequestPartitionId.isAllPartitions()) {
throw new PreconditionFailedException("This server is not configured to support search against all partitions");
}
}
switch (nextParamDef.getParamType()) {
case DATE:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
// FT: 2021-01-18 use operation 'gt', 'ge', 'le' or 'lt'
// to create the predicateDate instead of generic one with operation = null
SearchFilterParser.CompareOperation operation = null;
if (nextAnd.size() > 0) {
DateParam param = (DateParam) nextAnd.get(0);
operation = toOperation(param.getPrefix());
}
andPredicates.add(createPredicateDate(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, operation, theRequestPartitionId));
//andPredicates.add(createPredicateDate(theSourceJoinColumn, theResourceName, nextParamDef, nextAnd, null, theRequestPartitionId));
}
break;
case QUANTITY:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
SearchFilterParser.CompareOperation operation = null;
if (nextAnd.size() > 0) {
QuantityParam param = (QuantityParam) nextAnd.get(0);
operation = toOperation(param.getPrefix());
}
andPredicates.add(createPredicateQuantity(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, operation, theRequestPartitionId));
}
break;
case REFERENCE:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
if (theSearchContainedMode.equals(SearchContainedModeEnum.TRUE))
andPredicates.add(createPredicateReferenceForContainedResource(theSourceJoinColumn, theResourceName, theParamName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId));
else
andPredicates.add(createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, nextAnd, null, theRequest, theRequestPartitionId));
}
break;
case STRING:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
andPredicates.add(createPredicateString(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, SearchFilterParser.CompareOperation.sw, theRequestPartitionId));
}
break;
case TOKEN:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
if ("Location.position".equals(nextParamDef.getPath())) {
andPredicates.add(createPredicateCoords(theSourceJoinColumn, theResourceName, nextParamDef, nextAnd, theRequestPartitionId));
} else {
andPredicates.add(createPredicateToken(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, null, theRequestPartitionId));
}
}
break;
case NUMBER:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
andPredicates.add(createPredicateNumber(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, null, theRequestPartitionId));
}
break;
case COMPOSITE:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
andPredicates.add(createPredicateComposite(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, theRequestPartitionId));
}
break;
case URI:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
andPredicates.add(createPredicateUri(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, SearchFilterParser.CompareOperation.eq, theRequest, theRequestPartitionId));
}
break;
case HAS:
case SPECIAL:
for (List<? extends IQueryParameterType> nextAnd : theAndOrParams) {
if ("Location.position".equals(nextParamDef.getPath())) {
andPredicates.add(createPredicateCoords(theSourceJoinColumn, theResourceName, nextParamDef, nextAnd, theRequestPartitionId));
}
}
break;
}
} else {
// These are handled later
if (!Constants.PARAM_CONTENT.equals(theParamName) && !Constants.PARAM_TEXT.equals(theParamName)) {
if (Constants.PARAM_FILTER.equals(theParamName)) {
// Parse the predicates enumerated in the _filter separated by AND or OR...
if (theAndOrParams.get(0).get(0) instanceof StringParam) {
String filterString = ((StringParam) theAndOrParams.get(0).get(0)).getValue();
SearchFilterParser.Filter filter;
try {
filter = SearchFilterParser.parse(filterString);
} catch (SearchFilterParser.FilterSyntaxException theE) {
throw new InvalidRequestException("Error parsing _filter syntax: " + theE.getMessage());
}
if (filter != null) {
if (!myDaoConfig.isFilterParameterEnabled()) {
throw new InvalidRequestException(Constants.PARAM_FILTER + " parameter is disabled on this server");
}
Condition predicate = createPredicateFilter(this, filter, theResourceName, theRequest, theRequestPartitionId);
if (predicate != null) {
mySqlBuilder.addPredicate(predicate);
}
}
}
} else {
String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "invalidSearchParameter", theParamName, theResourceName, mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName));
throw new InvalidRequestException(msg);
}
}
}
return toAndPredicate(andPredicates);
}
public void addPredicateCompositeUnique(String theIndexString, RequestPartitionId theRequestPartitionId) {
CompositeUniqueSearchParameterPredicateBuilder predicateBuilder = mySqlBuilder.addCompositeUniquePredicateBuilder();
Condition predicate = predicateBuilder.createPredicateIndexString(theRequestPartitionId, theIndexString);

View File

@ -21,24 +21,23 @@ package ca.uhn.fhir.jpa.search.builder.predicate;
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import com.google.common.collect.Lists;
import com.healthmarketscience.sqlbuilder.BinaryCondition;
import com.healthmarketscience.sqlbuilder.ComboCondition;
import com.healthmarketscience.sqlbuilder.Condition;
import com.healthmarketscience.sqlbuilder.UnaryCondition;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbTable;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class TagPredicateBuilder extends BaseJoiningPredicateBuilder {
private static final Logger ourLog = LoggerFactory.getLogger(TagPredicateBuilder.class);
private final DbColumn myColumnResId;
private final DbTable myTagDefinitionTable;
@ -72,11 +71,20 @@ public class TagPredicateBuilder extends BaseJoiningPredicateBuilder {
List<Condition> orPredicates = Lists.newArrayList();
for (Pair<String, String> next : theTokens) {
Condition codePredicate = BinaryCondition.equalTo(myTagDefinitionColumnTagCode, generatePlaceholder(next.getRight()));
if (isNotBlank(next.getLeft())) {
Condition systemPredicate = BinaryCondition.equalTo(myTagDefinitionColumnTagSystem, generatePlaceholder(next.getLeft()));
String system = next.getLeft();
String code = next.getRight();
if (theTagType == TagTypeEnum.PROFILE) {
system = BaseHapiFhirDao.NS_JPA_PROFILE;
}
Condition codePredicate = BinaryCondition.equalTo(myTagDefinitionColumnTagCode, generatePlaceholder(code));
if (isNotBlank(system)) {
Condition systemPredicate = BinaryCondition.equalTo(myTagDefinitionColumnTagSystem, generatePlaceholder(system));
orPredicates.add(ComboCondition.and(typePredicate, systemPredicate, codePredicate));
} else {
// Note: We don't have an index for this combo, which means that this may not perform
// well on MySQL (and maybe others) without an added index
orPredicates.add(ComboCondition.and(typePredicate, codePredicate));
}
}

View File

@ -2298,7 +2298,11 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
{
SearchParameterMap params = new SearchParameterMap();
params.add("_profile", new UriParam("http://" + methodName));
logAllTokenIndexes();
myCaptureQueriesListener.clear();
List<IIdType> patients = toUnqualifiedVersionlessIds(myOrganizationDao.search(params));
myCaptureQueriesListener.logSelectQueries();
assertThat(patients, containsInAnyOrder(tag2id));
}
}

View File

@ -1,15 +1,35 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.SearchParameter;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.UUID;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4SearchSqlTest.class);
@AfterEach
public void after() {
myDaoConfig.setTagStorageMode(DaoConfig.DEFAULT_TAG_STORAGE_MODE);
}
/**
* One regular search params - Doesn't need HFJ_RESOURCE as root
*/
@ -27,7 +47,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
/**
* Two regular search params - Should use HFJ_RESOURCE as root
*/
*/
@Test
public void testTwoRegularSearchParams() {
@ -43,5 +63,68 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
}
/**
 * Verifies the SQL generated for a {@code _profile} search in the default (versioned)
 * tag storage mode: the tag-definition join must constrain on type + system + code
 * so that the tag index can be used efficiently.
 */
@Test
public void testSearchByProfile_VersionedMode() {
// Put a tag in so we can search for it
String code = "http://" + UUID.randomUUID();
Patient p = new Patient();
p.getMeta().addProfile(code);
IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
myMemoryCacheService.invalidateAllCaches();
// Search
myCaptureQueriesListener.clear();
SearchParameterMap map = SearchParameterMap.newSynchronous()
.add(Constants.PARAM_PROFILE, new TokenParam(code));
IBundleProvider outcome = myPatientDao.search(map);
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
// Query 1 - Find resources: Make sure we search for tag type+system+code always
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) LEFT OUTER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?)))", sql);
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql, containsString("where resourcese0_.RES_ID in (?)"));
// Query 3 - Load tags and definitions
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(false, false);
assertThat(sql, containsString("from HFJ_RES_TAG resourceta0_ inner join HFJ_TAG_DEF"));
assertThat(toUnqualifiedVersionlessIds(outcome), Matchers.contains(id));
}
/**
 * _profile search in INLINE tag storage mode: the profile is indexed as an
 * ordinary token via a custom "_profile" SearchParameter, so the search is a
 * plain HFJ_SPIDX_TOKEN lookup and one fewer SELECT is issued than in
 * versioned mode.
 */
@Test
public void testSearchByProfile_InlineMode() {
	myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);
	// Inline mode needs an explicit SearchParameter registered for meta.profile
	SearchParameter searchParameter = FhirResourceDaoR4TagsTest.createSearchParamForInlineResourceProfile();
	ourLog.info("SearchParam:\n{}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(searchParameter));
	mySearchParameterDao.update(searchParameter, mySrd);
	mySearchParamRegistry.forceRefresh();
	// Put a tag in so we can search for it
	String code = "http://" + UUID.randomUUID();
	Patient p = new Patient();
	p.getMeta().addProfile(code);
	IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
	// Drop caches so the search below actually exercises the database
	myMemoryCacheService.invalidateAllCaches();
	// Search
	myCaptureQueriesListener.clear();
	SearchParameterMap map = SearchParameterMap.newSynchronous()
		.add(Constants.PARAM_PROFILE, new TokenParam(code));
	IBundleProvider outcome = myPatientDao.search(map);
	assertEquals(2, myCaptureQueriesListener.countSelectQueries());
	// Query 1 - Find resources: Just a standard token search in this mode
	String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
	assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)", sql);
	// Query 2 - Load resource contents
	sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
	assertThat(sql, containsString("where resourcese0_.RES_ID in (?)"));
	assertThat(toUnqualifiedVersionlessIds(outcome), Matchers.contains(id));
}
}

View File

@ -1,10 +1,15 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.SearchParameter;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
@ -142,6 +147,175 @@ public class FhirResourceDaoR4TagsTest extends BaseJpaR4Test {
}
/**
 * Verifies that in INLINE tag storage mode, profiles/tags/security labels
 * round-trip through create/update/read while the dedicated tag tables
 * (HFJ_RES_TAG, HFJ_RES_HISTORY_TAG, HFJ_TAG_DEF) remain completely empty.
 */
@Test
public void testInlineTags_StoreAndRetrieve() {
	myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);

	// Write version 1 carrying one profile, one tag and one security label
	Patient initialVersion = new Patient();
	initialVersion.setId("Patient/A");
	initialVersion.getMeta().addProfile("http://profile1");
	initialVersion.getMeta().addTag("http://tag1", "vtag1", "dtag1");
	initialVersion.getMeta().addSecurity("http://sec1", "vsec1", "dsec1");
	initialVersion.setActive(true);
	myPatientDao.update(initialVersion, mySrd);

	// Inline mode stores the meta in the resource body, so the tag tables stay empty
	runInTransaction(() -> {
		assertEquals(0, myResourceTagDao.count());
		assertEquals(0, myResourceHistoryTagDao.count());
		assertEquals(0, myTagDefinitionDao.count());
	});

	// Read version 1 back and verify its meta round-tripped
	Patient readBack = myPatientDao.read(new IdType("Patient/A/_history/1"), mySrd);
	assertThat(toProfiles(readBack).toString(), toProfiles(readBack), contains("http://profile1"));
	assertThat(toTags(readBack).toString(), toTags(readBack), contains("http://tag1|vtag1|dtag1"));
	assertThat(toSecurityLabels(readBack).toString(), toSecurityLabels(readBack), contains("http://sec1|vsec1|dsec1"));

	// Write version 2 with an entirely different set of meta values
	Patient secondVersion = new Patient();
	secondVersion.setId("Patient/A");
	secondVersion.getMeta().addProfile("http://profile2");
	secondVersion.getMeta().addTag("http://tag2", "vtag2", "dtag2");
	secondVersion.getMeta().addSecurity("http://sec2", "vsec2", "dsec2");
	secondVersion.setActive(true);
	myPatientDao.update(secondVersion, mySrd);

	// Still nothing in the dedicated tag tables
	runInTransaction(() -> {
		assertEquals(0, myResourceTagDao.count());
		assertEquals(0, myResourceHistoryTagDao.count());
		assertEquals(0, myTagDefinitionDao.count());
	});

	// Version 1 keeps exactly its original meta
	readBack = myPatientDao.read(new IdType("Patient/A/_history/1"), mySrd);
	assertThat(toProfiles(readBack).toString(), toProfiles(readBack), contains("http://profile1"));
	assertThat(toTags(readBack).toString(), toTags(readBack), contains("http://tag1|vtag1|dtag1"));
	assertThat(toSecurityLabels(readBack).toString(), toSecurityLabels(readBack), contains("http://sec1|vsec1|dsec1"));

	// Version 2 shows only the new meta
	// TODO: We could copy these forward like we do for non-inline mode. Perhaps in the future.
	readBack = myPatientDao.read(new IdType("Patient/A/_history/2"), mySrd);
	assertThat(toProfiles(readBack).toString(), toProfiles(readBack), contains("http://profile2"));
	assertThat(toTags(readBack).toString(), toTags(readBack), containsInAnyOrder("http://tag2|vtag2|dtag2"));
	assertThat(toSecurityLabels(readBack).toString(), toSecurityLabels(readBack), containsInAnyOrder("http://sec2|vsec2|dsec2"));
}
/**
 * In INLINE mode, searching on "_tag" works once a custom token
 * SearchParameter for meta.tag has been registered.
 */
@Test
public void testInlineTags_Search_Tag() {
	myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);

	// Register a custom "_tag" token SearchParameter so inline tags become searchable
	SearchParameter tagSearchParam = createResourceTagSearchParameter();
	String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(tagSearchParam);
	ourLog.info("SearchParam:\n{}", encoded);
	mySearchParameterDao.update(tagSearchParam, mySrd);
	mySearchParamRegistry.forceRefresh();

	createPatientsForInlineSearchTests();
	logAllTokenIndexes();

	// Search on the tag and expect only the two matching patients
	SearchParameterMap params = SearchParameterMap.newSynchronous();
	params.add("_tag", new TokenParam("http://tag1", "vtag1"));
	IBundleProvider result = myPatientDao.search(params, mySrd);
	assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder("Patient/A", "Patient/B"));

	validatePatientSearchResultsForInlineTags(result);
}
/**
 * In INLINE mode, searching on "_profile" works once a custom token
 * SearchParameter for meta.profile has been registered.
 */
@Test
public void testInlineTags_Search_Profile() {
	myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);

	// Register a custom "_profile" token SearchParameter so inline profiles become searchable
	SearchParameter profileSearchParam = createSearchParamForInlineResourceProfile();
	String encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(profileSearchParam);
	ourLog.info("SearchParam:\n{}", encoded);
	mySearchParameterDao.update(profileSearchParam, mySrd);
	mySearchParamRegistry.forceRefresh();

	createPatientsForInlineSearchTests();
	logAllTokenIndexes();

	// Search on the profile and expect only the two matching patients
	SearchParameterMap params = SearchParameterMap.newSynchronous();
	params.add("_profile", new TokenParam("http://profile1"));
	IBundleProvider result = myPatientDao.search(params, mySrd);
	assertThat(toUnqualifiedVersionlessIdValues(result), containsInAnyOrder("Patient/A", "Patient/B"));

	validatePatientSearchResultsForInlineTags(result);
}
/**
 * In INLINE mode, searching on "_security" works once a custom token
 * SearchParameter for meta.security has been registered.
 *
 * <p>The SearchParameter is now built by {@link #createSecuritySearchParameter()},
 * consistent with the factory style used by the sibling _tag/_profile tests,
 * instead of being constructed inline.</p>
 */
@Test
public void testInlineTags_Search_Security() {
	myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.INLINE);
	SearchParameter searchParameter = createSecuritySearchParameter();
	ourLog.info("SearchParam:\n{}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(searchParameter));
	mySearchParameterDao.update(searchParameter, mySrd);
	mySearchParamRegistry.forceRefresh();

	createPatientsForInlineSearchTests();
	logAllTokenIndexes();

	// Perform a search
	SearchParameterMap map = SearchParameterMap.newSynchronous();
	map.add("_security", new TokenParam("http://sec1", "vsec1"));
	IBundleProvider outcome = myPatientDao.search(map, mySrd);
	assertThat(toUnqualifiedVersionlessIdValues(outcome), containsInAnyOrder("Patient/A", "Patient/B"));

	validatePatientSearchResultsForInlineTags(outcome);
}

/**
 * Builds a TOKEN SearchParameter with code "_security" and expression
 * "meta.security", applying to every R4 resource type.
 */
@Nonnull
public static SearchParameter createSecuritySearchParameter() {
	SearchParameter searchParameter = new SearchParameter();
	searchParameter.setId("SearchParameter/resource-security");
	for (String next : FhirContext.forR4Cached().getResourceTypes().stream().sorted().collect(Collectors.toList())) {
		searchParameter.addBase(next);
	}
	searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
	searchParameter.setType(Enumerations.SearchParamType.TOKEN);
	searchParameter.setCode("_security");
	searchParameter.setName("Security");
	searchParameter.setExpression("meta.security");
	return searchParameter;
}
/**
 * Asserts that the first two resources in the search result both carry the
 * "...1" profile, tag and security label written by
 * {@code createPatientsForInlineSearchTests()}.
 *
 * @param outcome the search result expected to contain Patient/A and Patient/B
 */
private void validatePatientSearchResultsForInlineTags(IBundleProvider outcome) {
	// Both matched patients have identical meta, so verify each in a loop
	// instead of duplicating the three assertions per result.
	for (int i = 0; i < 2; i++) {
		Patient patient = (Patient) outcome.getResources(i, i + 1).get(0);
		assertThat(toProfiles(patient).toString(), toProfiles(patient), contains("http://profile1"));
		assertThat(toTags(patient).toString(), toTags(patient), contains("http://tag1|vtag1|dtag1"));
		assertThat(toSecurityLabels(patient).toString(), toSecurityLabels(patient), contains("http://sec1|vsec1|dsec1"));
	}
}
/**
 * Creates the fixture patients for the inline-tag search tests: Patient/A and
 * Patient/B carry matching ("...1") meta values, Patient/NO carries
 * non-matching ("...99") values and must be excluded from the searches.
 */
private void createPatientsForInlineSearchTests() {
	// Two patients that should match the inline-tag searches...
	createPatientWithInlineMeta("Patient/A", "1");
	createPatientWithInlineMeta("Patient/B", "1");
	// ...and one that should not
	createPatientWithInlineMeta("Patient/NO", "99");
}

/**
 * Upserts a patient whose profile, tag and security-label values all end in
 * the given suffix (e.g. suffix "1" yields "http://profile1" / "vtag1" / ...).
 */
private void createPatientWithInlineMeta(String theId, String theSuffix) {
	Patient patient = new Patient();
	patient.setId(theId);
	patient.getMeta().addProfile("http://profile" + theSuffix);
	patient.getMeta().addTag("http://tag" + theSuffix, "vtag" + theSuffix, "dtag" + theSuffix);
	patient.getMeta().addSecurity("http://sec" + theSuffix, "vsec" + theSuffix, "dsec" + theSuffix);
	patient.setActive(true);
	myPatientDao.update(patient, mySrd);
}
private void initializeNonVersioned() {
myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.NON_VERSIONED);
@ -183,9 +357,44 @@ public class FhirResourceDaoR4TagsTest extends BaseJpaR4Test {
return patient.getMeta().getTag().stream().map(t -> t.getSystem() + "|" + t.getCode() + "|" + t.getDisplay()).collect(Collectors.toList());
}
/**
 * Renders each security label on the patient as "system|code|display" so the
 * tests can compare them as plain strings.
 */
@Nonnull
private List<String> toSecurityLabels(Patient patient) {
	return patient.getMeta()
		.getSecurity()
		.stream()
		.map(coding -> coding.getSystem() + "|" + coding.getCode() + "|" + coding.getDisplay())
		.collect(Collectors.toList());
}
/**
 * Extracts the raw profile URL strings from the patient's meta.
 */
@Nonnull
private List<String> toProfiles(Patient patient) {
	return patient.getMeta()
		.getProfile()
		.stream()
		.map(profile -> profile.getValue())
		.collect(Collectors.toList());
}
/**
 * Builds a TOKEN SearchParameter with code "_profile" and expression
 * "meta.profile", applying to every R4 resource type. Used by the
 * inline-tag-mode search tests.
 */
@Nonnull
public static SearchParameter createSearchParamForInlineResourceProfile() {
	return createInlineMetaSearchParameter("SearchParameter/resource-profile", "_profile", "Profile", "meta.profile");
}

/**
 * Builds a TOKEN SearchParameter with code "_tag" and expression "meta.tag",
 * applying to every R4 resource type. Used by the inline-tag-mode search tests.
 */
@Nonnull
public static SearchParameter createResourceTagSearchParameter() {
	return createInlineMetaSearchParameter("SearchParameter/resource-tag", "_tag", "Tag", "meta.tag");
}

/**
 * Shared builder for the two factories above: an ACTIVE TOKEN SearchParameter
 * whose base list covers every R4 resource type (sorted for determinism).
 *
 * @param theId         resource id to assign (e.g. "SearchParameter/resource-tag")
 * @param theCode       search parameter code (e.g. "_tag")
 * @param theName       human-readable name
 * @param theExpression FHIRPath expression to index (e.g. "meta.tag")
 */
@Nonnull
private static SearchParameter createInlineMetaSearchParameter(String theId, String theCode, String theName, String theExpression) {
	SearchParameter searchParameter = new SearchParameter();
	searchParameter.setId(theId);
	for (String next : FhirContext.forR4Cached().getResourceTypes().stream().sorted().collect(Collectors.toList())) {
		searchParameter.addBase(next);
	}
	searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
	searchParameter.setType(Enumerations.SearchParamType.TOKEN);
	searchParameter.setCode(theCode);
	searchParameter.setName(theName);
	searchParameter.setExpression(theExpression);
	return searchParameter;
}
}

View File

@ -48,36 +48,32 @@ import java.util.Collection;
public class TagDefinition implements Serializable {
private static final long serialVersionUID = 1L;
// Tag code value, capped at 200 chars in the database
@Column(name = "TAG_CODE", length = 200)
private String myCode;

// Human-readable display text for the tag
@Column(name = "TAG_DISPLAY", length = 200)
private String myDisplay;

// Primary key, assigned from the SEQ_TAGDEF_ID sequence
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_TAGDEF_ID")
@SequenceGenerator(name = "SEQ_TAGDEF_ID", sequenceName = "SEQ_TAGDEF_ID")
@Column(name = "TAG_ID")
private Long myId;

// Lazy back-reference: current resource versions carrying this tag
@OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myTag")
private Collection<ResourceTag> myResources;

// Lazy back-reference: historical resource versions carrying this tag
@OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myTag")
private Collection<ResourceHistoryTag> myResourceVersions;

// Tag system/namespace, capped at 200 chars in the database
@Column(name = "TAG_SYSTEM", length = 200)
private String mySystem;

// Category of the entry (see TagTypeEnum); persisted by enum ordinal, so the
// enum's constant order must never change
@Column(name = "TAG_TYPE", nullable = false)
@Enumerated(EnumType.ORDINAL)
private TagTypeEnum myTagType;

// Cached hash; transient so it is neither persisted by JPA nor serialized
@Transient
private transient Integer myHashCode;
/**
* Constructor
*/
public TagDefinition() {
super();
}
public TagDefinition(TagTypeEnum theTagType, String theSystem, String theCode, String theDisplay) {