Transaction create performance improvement (#1899)
* Work on perf issue

* Improve write performance for large bundles with tags

* Add changelog

* Update hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_1_0/1899-improve-write-xact-perf-with-tags.yaml

  Co-authored-by: IanMMarshall <49525404+IanMMarshall@users.noreply.github.com>

* Update hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java

  Co-authored-by: IanMMarshall <49525404+IanMMarshall@users.noreply.github.com>

* Update hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java

  Co-authored-by: IanMMarshall <49525404+IanMMarshall@users.noreply.github.com>

* Test fix

* Test fixes

* Test fixes

Co-authored-by: IanMMarshall <49525404+IanMMarshall@users.noreply.github.com>
This commit is contained in:
parent eb7b8e816b
commit 21330a0a22
@@ -0,0 +1,6 @@
---
type: perf
issue: 1899
title: "When submitting a transaction bundle containing a large number of resources being written, where the
  resources had tags or profile definitions, a number of redundant database calls have been optimized out. This should
  significantly improve performance for these scenarios."
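For context, the write scenario this changelog entry describes looks roughly like the bundle built below. This is a minimal sketch modeled on the testTransactionWithMultipleProfiles test added later in this commit; the profile and tag URLs are placeholders, and in the tests the bundle is submitted through the JPA system DAO's transaction() method.

import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Patient;

public class TaggedTransactionBundleSketch {

   public static Bundle buildTaggedTransaction(int theResourceCount) {
      Bundle input = new Bundle();
      input.setType(Bundle.BundleType.TRANSACTION);
      for (int i = 0; i < theResourceCount; i++) {
         Patient patient = new Patient();
         // Every resource carries the same profile and tags; before this change each
         // occurrence triggered its own tag/profile definition lookup during the transaction
         patient.getMeta().addProfile("http://example.com/profile");
         patient.getMeta().addTag().setSystem("http://example.com/tags").setCode("tag-1");
         patient.getMeta().addTag().setSystem("http://example.com/tags").setCode("tag-2");
         input.addEntry()
            .setResource(patient)
            .getRequest()
            .setMethod(Bundle.HTTPVerb.POST)
            .setUrl("Patient");
      }
      return input;
   }
}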
@@ -193,6 +193,34 @@ public class DaoConfig {
    */
   private boolean myDeleteEnabled = true;

+  /**
+   * If set to <code>true</code> (default is <code>false</code>) the <code>$lastn</code> operation will be enabled for
+   * indexing Observation resources. This operation involves creating a special set of tables in ElasticSearch for
+   * discovering Observation resources. Enabling this setting increases the amount of storage space required, and can
+   * slow write operations, but can be very useful for searching for collections of Observations for some applications.
+   *
+   * @since 5.1.0
+   */
+  public boolean isLastNEnabled() {
+     return myLastNEnabled;
+  }
+
+  /**
+   * If set to <code>true</code> (default is <code>false</code>) the <code>$lastn</code> operation will be enabled for
+   * indexing Observation resources. This operation involves creating a special set of tables in ElasticSearch for
+   * discovering Observation resources. Enabling this setting increases the amount of storage space required, and can
+   * slow write operations, but can be very useful for searching for collections of Observations for some applications.
+   *
+   * @since 5.1.0
+   */
+  public void setLastNEnabled(boolean theLastNEnabled) {
+     myLastNEnabled = theLastNEnabled;
+  }
+
+  /**
+   * @since 5.1.0
+   */
+  private boolean myLastNEnabled = false;

   /**
    * Constructor
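The new flag follows the usual DaoConfig conventions; a minimal sketch of toggling it, mirroring the @Before/@After hooks that the tests in this commit use (no other configuration is assumed):

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class LastNSettingSketch {

   public static void main(String[] args) {
      DaoConfig daoConfig = new DaoConfig();

      // Opt in to $lastn indexing of Observation resources (disabled by default)
      daoConfig.setLastNEnabled(true);

      // ... perform Observation writes / $lastn searches ...

      // Restore the shipped default, as the test teardown methods further down do
      daoConfig.setLastNEnabled(new DaoConfig().isLastNEnabled());
   }
}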
@@ -93,8 +93,6 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {
   /**
    * This method does not throw an exception if there are delete conflicts, but populates them
    * in the provided list
    *
    * @param theRequestDetails TODO
    */
   DaoMethodOutcome delete(IIdType theResource, DeleteConflictList theDeleteConflictsListToPopulate, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails);

@@ -47,6 +47,7 @@ import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;
import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig;
import ca.uhn.fhir.jpa.searchparam.extractor.IResourceLinkResolver;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;

@@ -182,6 +183,11 @@ public abstract class BaseConfig {
      return new BinaryStorageInterceptor();
   }

+  @Bean
+  public MemoryCacheService memoryCacheService() {
+     return new MemoryCacheService();
+  }
+
   @Bean
   @Primary
   public IResourceLinkResolver daoResourceLinkResolver() {
@@ -47,7 +47,6 @@ import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
-import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;

@@ -59,6 +58,7 @@ import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.AddRemoveCount;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.Tag;

@@ -76,6 +76,7 @@ import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;

@@ -91,6 +92,7 @@ import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseCoding;

@@ -233,6 +235,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
   private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
   @Autowired
   private IPartitionLookupSvc myPartitionLookupSvc;
+  @Autowired
+  private MemoryCacheService myMemoryCacheService;

   @Override
   protected IInterceptorBroadcaster getInterceptorBroadcaster() {
@@ -393,37 +397,45 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
      }
   }

   /**
    * <code>null</code> will only be returned if the scheme and tag are both blank
    */
   protected TagDefinition getTagOrNull(TagTypeEnum theTagType, String theScheme, String theTerm, String theLabel) {
      if (isBlank(theScheme) && isBlank(theTerm) && isBlank(theLabel)) {
         return null;
      }

-     CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
-     CriteriaQuery<TagDefinition> cq = builder.createQuery(TagDefinition.class);
-     Root<TagDefinition> from = cq.from(TagDefinition.class);
+     Pair<String, String> key = Pair.of(theScheme, theTerm);
+     return myMemoryCacheService.get(MemoryCacheService.CacheEnum.TAG_DEFINITION, key, k -> {

-     if (isNotBlank(theScheme)) {
-        cq.where(
-           builder.and(
-              builder.equal(from.get("myTagType"), theTagType),
-              builder.equal(from.get("mySystem"), theScheme),
-              builder.equal(from.get("myCode"), theTerm)));
-     } else {
-        cq.where(
-           builder.and(
-              builder.equal(from.get("myTagType"), theTagType),
-              builder.isNull(from.get("mySystem")),
-              builder.equal(from.get("myCode"), theTerm)));
-     }
+        CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
+        CriteriaQuery<TagDefinition> cq = builder.createQuery(TagDefinition.class);
+        Root<TagDefinition> from = cq.from(TagDefinition.class);

-     TypedQuery<TagDefinition> q = myEntityManager.createQuery(cq);
-     try {
-        return q.getSingleResult();
-     } catch (NoResultException e) {
-        TagDefinition retVal = new TagDefinition(theTagType, theScheme, theTerm, theLabel);
-        myEntityManager.persist(retVal);
-        return retVal;
-     }
+        if (isNotBlank(theScheme)) {
+           cq.where(
+              builder.and(
+                 builder.equal(from.get("myTagType"), theTagType),
+                 builder.equal(from.get("mySystem"), theScheme),
+                 builder.equal(from.get("myCode"), theTerm)));
+        } else {
+           cq.where(
+              builder.and(
+                 builder.equal(from.get("myTagType"), theTagType),
+                 builder.isNull(from.get("mySystem")),
+                 builder.equal(from.get("myCode"), theTerm)));
+        }
+
+        TypedQuery<TagDefinition> q = myEntityManager.createQuery(cq);
+        try {
+           return q.getSingleResult();
+        } catch (NoResultException e) {
+           TagDefinition retVal = new TagDefinition(theTagType, theScheme, theTerm, theLabel);
+           myEntityManager.persist(retVal);
+           return retVal;
+        }
+
+     });
   }

   protected IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive) {
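The caching pattern above, keying the tag definition on a Pair of scheme and term and loading it at most once per cache window, is the heart of the write-path saving. The sketch below is not HAPI code; it is a self-contained, hypothetical illustration of the same loader-based Caffeine idea and of why N resources carrying the same tag now cost one lookup instead of N.

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.commons.lang3.tuple.Pair;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class TagLookupCacheSketch {

   private final Cache<Pair<String, String>, String> tagCache = Caffeine.newBuilder()
      .expireAfterWrite(1, TimeUnit.MINUTES)
      .maximumSize(10000)
      .build();

   private final AtomicInteger databaseCalls = new AtomicInteger();

   public String resolveTag(String theScheme, String theTerm) {
      // The loader only runs on a cache miss, so repeated resources with the same
      // tag trigger a single lookup instead of one per resource
      return tagCache.get(Pair.of(theScheme, theTerm), key -> {
         databaseCalls.incrementAndGet(); // stand-in for the criteria query shown above
         return key.getLeft() + "|" + key.getRight();
      });
   }

   public static void main(String[] args) {
      TagLookupCacheSketch sketch = new TagLookupCacheSketch();
      for (int i = 0; i < 5; i++) {
         sketch.resolveTag("http://example.com/tags", "tag-1");
      }
      System.out.println("Database calls: " + sketch.databaseCalls.get()); // prints 1
   }
}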
@@ -54,12 +54,14 @@ public abstract class BaseHapiFhirResourceDaoObservation<T extends IBaseResource
      ResourceTable retVal = super.updateEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion,
         theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry);

-     if (!retVal.isUnchangedInCurrentOperation()) {
-        if (retVal.getDeleted() == null) {
-           // Update indexes here for LastN operation.
-           myObservationLastNIndexPersistSvc.indexObservation(theResource);
-        } else {
-           myObservationLastNIndexPersistSvc.deleteObservationIndex(theEntity);
+     if (myDaoConfig.isLastNEnabled()) {
+        if (!retVal.isUnchangedInCurrentOperation()) {
+           if (retVal.getDeleted() == null) {
+              // Update indexes here for LastN operation.
+              myObservationLastNIndexPersistSvc.indexObservation(theResource);
+           } else {
+              myObservationLastNIndexPersistSvc.deleteObservationIndex(theEntity);
+           }
         }
      }

@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;

@@ -105,6 +106,8 @@ class ResourceExpungeService implements IResourceExpungeService {
   private IResourceProvenanceDao myResourceHistoryProvenanceTableDao;
   @Autowired
   private ISearchParamPresentDao mySearchParamPresentDao;
+  @Autowired
+  private DaoConfig myDaoConfig;

   @Override
   @Transactional

@@ -238,19 +241,42 @@ class ResourceExpungeService implements IResourceExpungeService {
   @Override
   @Transactional
   public void deleteAllSearchParams(Long theResourceId) {
-     myResourceIndexedSearchParamUriDao.deleteByResourceId(theResourceId);
-     myResourceIndexedSearchParamCoordsDao.deleteByResourceId(theResourceId);
-     myResourceIndexedSearchParamDateDao.deleteByResourceId(theResourceId);
-     myResourceIndexedSearchParamNumberDao.deleteByResourceId(theResourceId);
-     myResourceIndexedSearchParamQuantityDao.deleteByResourceId(theResourceId);
-     myResourceIndexedSearchParamStringDao.deleteByResourceId(theResourceId);
-     myResourceIndexedSearchParamTokenDao.deleteByResourceId(theResourceId);
-     myResourceIndexedCompositeStringUniqueDao.deleteByResourceId(theResourceId);
-     mySearchParamPresentDao.deleteByResourceId(theResourceId);
-     myResourceLinkDao.deleteByResourceId(theResourceId);
+     ResourceTable resource = myResourceTableDao.findById(theResourceId).orElse(null);
+
+     if (resource == null || resource.isParamsUriPopulated()) {
+        myResourceIndexedSearchParamUriDao.deleteByResourceId(theResourceId);
+     }
+     if (resource == null || resource.isParamsCoordsPopulated()) {
+        myResourceIndexedSearchParamCoordsDao.deleteByResourceId(theResourceId);
+     }
+     if (resource == null || resource.isParamsDatePopulated()) {
+        myResourceIndexedSearchParamDateDao.deleteByResourceId(theResourceId);
+     }
+     if (resource == null || resource.isParamsNumberPopulated()) {
+        myResourceIndexedSearchParamNumberDao.deleteByResourceId(theResourceId);
+     }
+     if (resource == null || resource.isParamsQuantityPopulated()) {
+        myResourceIndexedSearchParamQuantityDao.deleteByResourceId(theResourceId);
+     }
+     if (resource == null || resource.isParamsStringPopulated()) {
+        myResourceIndexedSearchParamStringDao.deleteByResourceId(theResourceId);
+     }
+     if (resource == null || resource.isParamsTokenPopulated()) {
+        myResourceIndexedSearchParamTokenDao.deleteByResourceId(theResourceId);
+     }
+     if (resource == null || resource.isParamsCompositeStringUniquePresent()) {
+        myResourceIndexedCompositeStringUniqueDao.deleteByResourceId(theResourceId);
+     }
+     if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.ENABLED) {
+        mySearchParamPresentDao.deleteByResourceId(theResourceId);
+     }
+     if (resource == null || resource.isHasLinks()) {
+        myResourceLinkDao.deleteByResourceId(theResourceId);
+     }

-     myResourceTagDao.deleteByResourceId(theResourceId);
+     if (resource == null || resource.isHasTags()) {
+        myResourceTagDao.deleteByResourceId(theResourceId);
+     }
   }

   private void expungeHistoricalVersionsOfId(RequestDetails theRequestDetails, Long myResourceId, AtomicInteger theRemainingCount) {
@@ -30,6 +30,7 @@ import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.cross.ResourceLookup;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.RequestDetails;

@@ -105,17 +106,6 @@ public class IdHelperService {
   @Autowired
   private FhirContext myFhirCtx;

-  private Cache<String, Long> myPersistentIdCache;
-  private Cache<String, IResourceLookup> myResourceLookupCache;
-  private Cache<Long, Optional<String>> myForcedIdCache;
-
-  @PostConstruct
-  public void start() {
-     myPersistentIdCache = newCache();
-     myResourceLookupCache = newCache();
-     myForcedIdCache = newCache();
-  }
-
   public void delete(ForcedId forcedId) {
      myForcedIdDao.deleteByPid(forcedId.getId());
   }

@@ -138,6 +128,9 @@ public class IdHelperService {
      return matches.iterator().next();
   }

+  @Autowired
+  private MemoryCacheService myMemoryCacheService;
+
   /**
    * Given a resource type and ID, determines the internal persistent ID for the resource.
    *

@@ -151,7 +144,7 @@ public class IdHelperService {
         retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, theId);
      } else {
         String key = RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + theResourceType + "/" + theId;
-        retVal = myPersistentIdCache.get(key, t -> resolveResourceIdentity(theRequestPartitionId, theResourceType, theId));
+        retVal = myMemoryCacheService.get(MemoryCacheService.CacheEnum.PERSISTENT_ID, key, t -> resolveResourceIdentity(theRequestPartitionId, theResourceType, theId));
      }

   } else {

@@ -201,7 +194,7 @@ public class IdHelperService {
      for (Iterator<String> idIterator = nextIds.iterator(); idIterator.hasNext(); ) {
         String nextId = idIterator.next();
         String key = RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + nextResourceType + "/" + nextId;
-        Long nextCachedPid = myPersistentIdCache.getIfPresent(key);
+        Long nextCachedPid = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.PERSISTENT_ID, key);
         if (nextCachedPid != null) {
            idIterator.remove();
            retVal.add(new ResourcePersistentId(nextCachedPid));

@@ -226,7 +219,7 @@ public class IdHelperService {
            retVal.add(new ResourcePersistentId(pid));

            String key = RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + nextResourceType + "/" + forcedId;
-           myPersistentIdCache.put(key, pid);
+           myMemoryCacheService.put(MemoryCacheService.CacheEnum.PERSISTENT_ID, key, pid);
         }
      }

@@ -255,7 +248,7 @@ public class IdHelperService {


   public Optional<String> translatePidIdToForcedId(ResourcePersistentId theId) {
-     return myForcedIdCache.get(theId.getIdAsLong(), pid -> myForcedIdDao.findByResourcePid(pid).map(t -> t.getForcedId()));
+     return myMemoryCacheService.get(MemoryCacheService.CacheEnum.FORCED_ID, theId.getIdAsLong(), pid -> myForcedIdDao.findByResourcePid(pid).map(t -> t.getForcedId()));
   }

   private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {

@@ -329,7 +322,7 @@ public class IdHelperService {
      for (Iterator<String> forcedIdIterator = nextIds.iterator(); forcedIdIterator.hasNext(); ) {
         String nextForcedId = forcedIdIterator.next();
         String nextKey = nextResourceType + "/" + nextForcedId;
-        IResourceLookup cachedLookup = myResourceLookupCache.getIfPresent(nextKey);
+        IResourceLookup cachedLookup = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey);
         if (cachedLookup != null) {
            forcedIdIterator.remove();
            retVal.add(cachedLookup);

@@ -361,7 +354,7 @@ public class IdHelperService {

      if (!myDaoConfig.isDeleteEnabled()) {
         String key = resourceType + "/" + forcedId;
-        myResourceLookupCache.put(key, lookup);
+        myMemoryCacheService.put(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, key, lookup);
      }
   }
   }

@@ -377,7 +370,7 @@ public class IdHelperService {
      for (Iterator<Long> forcedIdIterator = thePidsToResolve.iterator(); forcedIdIterator.hasNext(); ) {
         Long nextPid = forcedIdIterator.next();
         String nextKey = Long.toString(nextPid);
-        IResourceLookup cachedLookup = myResourceLookupCache.getIfPresent(nextKey);
+        IResourceLookup cachedLookup = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey);
         if (cachedLookup != null) {
            forcedIdIterator.remove();
            theTarget.add(cachedLookup);

@@ -403,30 +396,16 @@ public class IdHelperService {
         theTarget.add(t);
         if (!myDaoConfig.isDeleteEnabled()) {
            String nextKey = Long.toString(t.getResourceId());
-           myResourceLookupCache.put(nextKey, t);
+           myMemoryCacheService.put(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, t);
         }
      });

      }
   }

-  public void clearCache() {
-     myPersistentIdCache.invalidateAll();
-     myResourceLookupCache.invalidateAll();
-     myForcedIdCache.invalidateAll();
-  }
-
-  private <T, V> @NonNull Cache<T, V> newCache() {
-     return Caffeine
-        .newBuilder()
-        .maximumSize(10000)
-        .expireAfterWrite(10, TimeUnit.MINUTES)
-        .build();
-  }
-
   public Map<Long, Optional<String>> translatePidsToForcedIds(Set<Long> thePids) {

-     Map<Long, Optional<String>> retVal = new HashMap<>(myForcedIdCache.getAllPresent(thePids));
+     Map<Long, Optional<String>> retVal = new HashMap<>(myMemoryCacheService.getAllPresent(MemoryCacheService.CacheEnum.FORCED_ID, thePids));

      List<Long> remainingPids = thePids
         .stream()

@@ -440,7 +419,7 @@ public class IdHelperService {
         Long nextResourcePid = forcedId.getResourceId();
         Optional<String> nextForcedId = Optional.of(forcedId.getForcedId());
         retVal.put(nextResourcePid, nextForcedId);
-        myForcedIdCache.put(nextResourcePid, nextForcedId);
+        myMemoryCacheService.put(MemoryCacheService.CacheEnum.FORCED_ID, nextResourcePid, nextForcedId);
      }
   });

@@ -450,7 +429,7 @@ public class IdHelperService {
      .collect(Collectors.toList());
   for (Long nextResourcePid : remainingPids) {
      retVal.put(nextResourcePid, Optional.empty());
-     myForcedIdCache.put(nextResourcePid, Optional.empty());
+     myMemoryCacheService.put(MemoryCacheService.CacheEnum.FORCED_ID, nextResourcePid, Optional.empty());
   }

   return retVal;
@@ -117,6 +117,7 @@ public class CascadingDeleteInterceptor {
            String nextSourceId = nextSource.toUnqualifiedVersionless().getValue();

            if (!cascadedDeletes.contains(nextSourceId)) {
+              cascadedDeletes.add(nextSourceId);

               IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextSource.getResourceType());

@@ -132,9 +133,6 @@ public class CascadingDeleteInterceptor {
               // Actually perform the delete
               ourLog.info("Have delete conflict {} - Cascading delete", next);
               dao.delete(nextSource, theConflictList, theRequest, theTransactionDetails);

-              cascadedDeletes.add(nextSourceId);
-
            }
         }

@@ -0,0 +1,72 @@
package ca.uhn.fhir.jpa.util;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

import javax.annotation.PostConstruct;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

/**
 * This class acts as a central spot for all of the many Caffeine caches we use in HAPI FHIR.
 * <p>
 * The API is super simplistic, and caches are all 1-minute, max 10000 entries for starters. We could definitely add nuance to this,
 * which will be much easier now that this is being centralized. Some logging/monitoring would be good too.
 */
public class MemoryCacheService {

   private EnumMap<CacheEnum, Cache<?, ?>> myCaches;

   @PostConstruct
   public void start() {

      myCaches = new EnumMap<>(CacheEnum.class);

      for (CacheEnum next : CacheEnum.values()) {
         Cache<Object, Object> nextCache = Caffeine.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).maximumSize(10000).build();
         myCaches.put(next, nextCache);
      }

   }


   public <K, T> T get(CacheEnum theCache, K theKey, Function<K, T> theSupplier) {
      Cache<K, T> cache = getCache(theCache);
      return cache.get(theKey, theSupplier);
   }

   public <K, V> V getIfPresent(CacheEnum theCache, K theKey) {
      return (V) getCache(theCache).getIfPresent(theKey);
   }

   public <K, V> void put(CacheEnum theCache, K theKey, V theValue) {
      getCache(theCache).put(theKey, theValue);
   }

   public <K, V> Map<K, V> getAllPresent(CacheEnum theCache, Iterable<K> theKeys) {
      return (Map<K, V>) getCache(theCache).getAllPresent(theKeys);
   }

   public void invalidateAllCaches() {
      myCaches.values().forEach(t -> t.invalidateAll());
   }

   private <K, T> Cache<K, T> getCache(CacheEnum theCache) {
      return (Cache<K, T>) myCaches.get(theCache);
   }

   public enum CacheEnum {

      TAG_DEFINITION,
      PERSISTENT_ID,
      RESOURCE_LOOKUP,
      FORCED_ID,

   }


}
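A short sketch of how a caller can exercise the new service outside of Spring. The key and values below are placeholders; in the JPA layer the bean registered in BaseConfig is injected and the loader functions resolve against the database.

import ca.uhn.fhir.jpa.util.MemoryCacheService;

import java.util.Optional;

public class MemoryCacheServiceUsageSketch {

   public static void main(String[] args) {
      MemoryCacheService cacheService = new MemoryCacheService();
      cacheService.start(); // normally invoked by Spring via @PostConstruct

      String key = "PartitionA/Patient/example-id"; // placeholder key in the style IdHelperService builds
      Long pid = cacheService.get(MemoryCacheService.CacheEnum.PERSISTENT_ID, key, k -> 42L); // loader runs on a miss
      Long cached = cacheService.getIfPresent(MemoryCacheService.CacheEnum.PERSISTENT_ID, key); // now served from memory

      cacheService.put(MemoryCacheService.CacheEnum.FORCED_ID, pid, Optional.of("example-id"));
      System.out.println(pid + " -> " + cached);

      cacheService.invalidateAllCaches(); // the tests call this between test methods
   }
}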
@@ -21,6 +21,7 @@ import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.rest.api.Constants;

@@ -122,6 +123,8 @@ public abstract class BaseJpaTest extends BaseTest {
   protected IPartitionLookupSvc myPartitionConfigSvc;
   @Autowired
   private IdHelperService myIdHelperService;
+  @Autowired
+  private MemoryCacheService myMemoryCacheService;

   @After
   public void afterPerformCleanup() {

@@ -132,10 +135,9 @@ public abstract class BaseJpaTest extends BaseTest {
      if (myPartitionConfigSvc != null) {
         myPartitionConfigSvc.clearCaches();
      }
-     if (myIdHelperService != null) {
-        myIdHelperService.clearCache();
+     if (myMemoryCacheService != null) {
+        myMemoryCacheService.invalidateAllCaches();
      }

   }

   @After
@@ -1,9 +1,13 @@
package ca.uhn.fhir.jpa.dao.dstu3;

import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.GZipUtil;
import ca.uhn.fhir.jpa.dao.r4.FhirSystemDaoR4;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;

@@ -21,8 +25,10 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.base.Charsets;
import com.google.common.collect.Maps;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.dstu3.model.Appointment;
import org.hl7.fhir.dstu3.model.Attachment;

@@ -60,6 +66,8 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.internal.stubbing.answers.CallsRealMethods;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;

@@ -68,8 +76,11 @@ import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

@@ -90,15 +101,25 @@ import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.when;

public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {

   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoDstu3Test.class);
+  @Autowired
+  private DaoRegistry myDaoRegistry;
+  @Autowired
+  private IInterceptorService myInterceptorBroadcaster;

   @After
   public void after() {
      myDaoConfig.setAllowInlineMatchUrlReferences(false);
      myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
      myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
+     myDaoConfig.setMaximumDeleteConflictQueryCount(new DaoConfig().getMaximumDeleteConflictQueryCount());

   }

   @Before

@@ -1826,6 +1847,42 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
      assertEquals("201 Created", resp.getEntry().get(1).getResponse().getStatus());
   }

+  /**
+   * There is nothing here that isn't tested elsewhere, but it's useful for testing a large transaction followed
+   * by a large cascading delete
+   */
+  @Test
+  @Ignore
+  public void testTransactionFromBundle_Slow() throws Exception {
+     myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
+     myDaoConfig.setMaximumDeleteConflictQueryCount(10000);
+
+     StopWatch sw = new StopWatch();
+     sw.startTask("Parse Bundle");
+     Bundle bundle = loadBundle("/dstu3/slow_bundle.xml");
+
+     sw.startTask("Process transaction");
+     Bundle resp = mySystemDao.transaction(mySrd, bundle);
+     ourLog.info("Tasks: {}", sw.formatTaskDurations());
+
+     assertEquals("201 Created", resp.getEntry().get(0).getResponse().getStatus());
+
+     doAnswer(new CallsRealMethods()).when(mySrd).setParameters(any());
+     when(mySrd.getParameters()).thenCallRealMethod();
+     when(mySrd.getUserData()).thenReturn(new HashMap<>());
+     Map<String, String[]> params = Maps.newHashMap();
+     params.put(Constants.PARAMETER_CASCADE_DELETE, new String[]{Constants.CASCADE_DELETE});
+     mySrd.setParameters(params);
+
+     CascadingDeleteInterceptor deleteInterceptor = new CascadingDeleteInterceptor(myFhirCtx, myDaoRegistry, myInterceptorBroadcaster);
+     myInterceptorBroadcaster.registerInterceptor(deleteInterceptor);
+
+     myPatientDao.deleteByUrl("Patient?identifier=http://fhir.nl/fhir/NamingSystem/bsn|900197341", mySrd);
+
+  }
+
   @Test
   public void testTransactionOrdering() {
      String methodName = "testTransactionOrdering";
@@ -21,6 +21,7 @@ import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;
+import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;

@@ -78,6 +79,16 @@ public class BaseR4SearchLastN extends BaseJpaTest {
      return myPlatformTransactionManager;
   }

+  @Before
+  public void beforeEnableLastN() {
+     myDaoConfig.setLastNEnabled(true);
+  }
+
+  @After
+  public void afterDisableLastN() {
+     myDaoConfig.setLastNEnabled(new DaoConfig().isLastNEnabled());
+  }
+
   protected final String observationCd0 = "code0";
   protected final String observationCd1 = "code1";
   protected final String observationCd2 = "code2";
@@ -1055,6 +1055,49 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {

   }


+  @Test
+  public void testTransactionWithMultipleProfiles() {
+     myDaoConfig.setDeleteEnabled(true);
+     myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
+
+     // Create transaction
+
+     Bundle input = new Bundle();
+     for (int i = 0; i < 5; i++) {
+        Patient patient = new Patient();
+        patient.getMeta().addProfile("http://example.com/profile");
+        patient.getMeta().addTag().setSystem("http://example.com/tags").setCode("tag-1");
+        patient.getMeta().addTag().setSystem("http://example.com/tags").setCode("tag-2");
+        input.addEntry()
+           .setResource(patient)
+           .getRequest()
+           .setMethod(Bundle.HTTPVerb.POST)
+           .setUrl("Patient");
+     }
+
+     myCaptureQueriesListener.clear();
+     mySystemDao.transaction(mySrd, input);
+     myCaptureQueriesListener.logSelectQueriesForCurrentThread();
+     assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+     assertEquals(8, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+     myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
+     assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+     assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+
+     // Do the same a second time
+
+     myCaptureQueriesListener.clear();
+     mySystemDao.transaction(mySrd, input);
+     myCaptureQueriesListener.logSelectQueriesForCurrentThread();
+     assertEquals(0, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+     assertEquals(5, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+     assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+     assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+
+  }
+

   @AfterClass
   public static void afterClassClearContext() {
      TestUtil.clearAllStaticFieldsForUnitTest();
@@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticsearchClient;
import ca.uhn.fhir.jpa.dao.ObservationLastNIndexPersistSvc;

@@ -15,6 +16,7 @@ import ca.uhn.fhir.rest.param.*;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.r4.model.*;
+import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

@@ -56,6 +58,9 @@ public class PersistObservationIndexedSearchParamLastNR4IT {
   @Autowired
   protected FhirContext myFhirCtx;

+  @Autowired
+  private DaoConfig myDaoConfig;
+
   @Before
   public void before() throws IOException {

@@ -63,8 +68,20 @@ public class PersistObservationIndexedSearchParamLastNR4IT {
      elasticsearchSvc.deleteAllDocumentsForTest(ElasticsearchSvcImpl.OBSERVATION_CODE_INDEX);
      elasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX);
      elasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_CODE_INDEX);

   }

+  @Before
+  public void beforeEnableLastN() {
+     myDaoConfig.setLastNEnabled(true);
+  }
+
+  @After
+  public void afterDisableLastN() {
+     myDaoConfig.setLastNEnabled(new DaoConfig().isLastNEnabled());
+  }
+

   private final String SINGLE_SUBJECT_ID = "4567";
   private final String SINGLE_OBSERVATION_PID = "123";
   private final Date SINGLE_EFFECTIVEDTM = new Date();
File diff suppressed because one or more lines are too long
@@ -102,6 +102,7 @@ abstract public class BaseEmpiR4Test extends BaseJpaR4Test {

   protected ServletRequestDetails myRequestDetails = new ServletRequestDetails(null);

+  @Override
   @After
   public void after() {
      myEmpiLinkDao.deleteAll();
@@ -24,6 +24,7 @@ public class EmpiLinkSvcTest extends BaseEmpiR4Test {
   @Autowired
   IEmpiLinkSvc myEmpiLinkSvc;

+  @Override
   @After
   public void after() {
      myExpungeEverythingService.expungeEverythingByType(EmpiLink.class);
@@ -78,6 +78,7 @@ public class EmpiPersonMergerSvcTest extends BaseEmpiR4Test {
      myInterceptorService.registerInterceptor(myEmpiStorageInterceptor);
   }

+  @Override
   @After
   public void after() {
      myInterceptorService.unregisterInterceptor(myEmpiStorageInterceptor);
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.test;
 */

import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;

@@ -66,10 +67,14 @@ public abstract class BaseJpaTest {
   @Autowired
   ApplicationContext myApplicationContext;

+  @Autowired
+  MemoryCacheService myMemoryCacheService;
+
   @After
   public void after() {
      ourLog.info("\n --- @After ---");
      myExpungeEverythingService.expungeEverything(null);
+     myMemoryCacheService.invalidateAllCaches();
   }

   public TransactionTemplate newTxTemplate() {