4598: Possible creation of resource duplicates when ingesting a large quantity of resources (#4609)
* Creating test to reproduce the issue.
* Rebasing development branch from release to master.
* Adding migration task.
* Providing solution to duplicate resource creation and passing initial test.
* Making initial test more readable.
* Implementing test for scheduled deletion of stale searchUrl entries.
* Removing interface declaration since it was not needed.
* Reverting changes.
* Adding changelog.
* Adding comments for clarity.
* Adding purgeDatabase operation in IT to delete all ResourceSearchUrlEntity [before|after]Each test.
* Adding pom.xml exclusion to get all tests passing.
* Adjusting query counts and adding pointcut name to log in interceptor.
* Fixing typos.
* WIP: implementing edge case tests and solutions.
* WIP: passing more tests.
* WIP: passing all tests.
* Enhancing the test.
* Changes following code review.
* Modifications to tests.
* Minor modifications following second code review.
* Passing all tests.

---------

Co-authored-by: peartree <etienne.poirier@smilecdr.com>
parent 59123cf0f0
commit fdf7450096
@@ -0,0 +1,5 @@
---
type: fix
issue: 4597
jira: SMILE-5993
title: "Simultaneous conditional create or create-on-update operations no longer create duplicate matching resources."
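The race this entry describes arises when identical conditional create (or create-on-update) requests arrive at the same time. As a point of reference, here is a minimal client-side sketch of such a request; the endpoint URL and identifier value are illustrative only, and the HAPI generic client is used just to show the shape of the call, not to document anything new in this commit.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Observation;

public class ConditionalCreateExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		// Hypothetical server address; any HAPI FHIR JPA server would behave the same way.
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		Observation obs = new Observation();
		obs.addIdentifier().setValue("20210427133226.444");

		// Sends "If-None-Exist: Observation?identifier=..." so the server only creates the
		// resource when no match exists. Before this fix, two such requests racing each other
		// could both pass the match check and create duplicate resources.
		MethodOutcome outcome = client.create()
			.resource(obs)
			.conditionalByUrl("Observation?identifier=20210427133226.444")
			.execute();

		System.out.println("created=" + outcome.getCreated() + " id=" + outcome.getId().getValue());
	}
}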
@@ -25,6 +25,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import org.hibernate.HibernateException;
import org.hibernate.PessimisticLockException;
@@ -88,6 +89,9 @@ public class HapiFhirHibernateJpaDialect extends HibernateJpaDialect {
if (constraintName.contains(ForcedId.IDX_FORCEDID_TYPE_FID)) {
throw new ResourceVersionConflictException(Msg.code(825) + messageToPrepend + myLocalizer.getMessage(HapiFhirHibernateJpaDialect.class, "forcedIdConstraintFailure"));
}
if (constraintName.contains(ResourceSearchUrlEntity.RES_SEARCH_URL_COLUMN_NAME)) {
throw super.convertHibernateAccessException(theException);
}
}
}
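With the new branch above, a violation of the single-column primary key on RES_SEARCH_URL is rethrown through the stock Hibernate conversion rather than the forced-ID message, so it surfaces to callers as a retryable conflict. The fragment below is a sketch distilled from the new FhirResourceDaoR4ConcurrentCreateTest later in this commit; myInterceptorRegistry and myObservationDao are assumed to be Spring-injected fixtures as in that test, not new API.

// Sketch only, mirroring FhirResourceDaoR4ConcurrentCreateTest (below).
myInterceptorRegistry.registerInterceptor(new UserRequestRetryVersionConflictsInterceptor());

Observation obs = new Observation();
obs.addIdentifier().setValue("20210427133226.444+0800");

// Opt this request into retries so a thread that loses the constraint race simply retries,
// re-resolves the match URL and returns the resource created by the winning thread.
RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setRetry(true);
requestDetails.setMaxRetries(3);

myObservationDao.create(obs, "identifier=20210427133226.444+0800", requestDetails);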
@@ -13,6 +13,7 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ISearchUrlJobMaintenanceSvc;
import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
@@ -34,6 +35,7 @@ import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
import ca.uhn.fhir.jpa.dao.ObservationLastNIndexPersistSvc;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
@@ -85,7 +87,9 @@ import ca.uhn.fhir.jpa.search.ISynchronousSearchSvc;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.search.PersistedJpaSearchFirstPageBundleProvider;
import ca.uhn.fhir.jpa.search.ResourceSearchUrlSvc;
import ca.uhn.fhir.jpa.search.SearchStrategyFactory;
import ca.uhn.fhir.jpa.search.SearchUrlJobMaintenanceSvcImpl;
import ca.uhn.fhir.jpa.search.SynchronousSearchSvcImpl;
import ca.uhn.fhir.jpa.search.builder.QueryStack;
import ca.uhn.fhir.jpa.search.builder.predicate.ComboNonUniqueSearchParameterPredicateBuilder;
@@ -120,6 +124,7 @@ import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer;
import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;
import ca.uhn.fhir.jpa.search.warm.CacheWarmingSvcImpl;
import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig;
import ca.uhn.fhir.jpa.searchparam.extractor.IResourceLinkResolver;
import ca.uhn.fhir.jpa.searchparam.nickname.NicknameInterceptor;
@@ -137,6 +142,7 @@ import ca.uhn.fhir.jpa.term.api.ITermReindexingSvc;
import ca.uhn.fhir.jpa.term.config.TermCodeSystemConfig;
import ca.uhn.fhir.jpa.util.JpaHapiTransactionService;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.PersistenceContextProvider;
import ca.uhn.fhir.jpa.validation.ResourceLoaderImpl;
import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.mdm.dao.IMdmLinkDao;
@@ -781,7 +787,6 @@ public class JpaConfig {
return new TermCodeSystemStorageSvcImpl();
}

@Bean
public ITermReindexingSvc termReindexingSvc() {
return new TermReindexingSvcImpl();
@@ -793,7 +798,7 @@
}

@Bean
public IMdmLinkDao<JpaPid, MdmLink> mdmLinkDao() {
public IMdmLinkDao<JpaPid, MdmLink> mdmLinkDao(){
return new MdmLinkDaoJpaImpl();
}

@@ -801,4 +806,20 @@ public class JpaConfig {
IMdmLinkImplFactory<MdmLink> mdmLinkImplFactory() {
return new JpaMdmLinkImplFactory();
}

@Bean
@Scope("prototype")
public PersistenceContextProvider persistenceContextProvider(){
return new PersistenceContextProvider();
}

@Bean
public ResourceSearchUrlSvc resourceSearchUrlSvc(PersistenceContextProvider thePersistenceContextProvider, IResourceSearchUrlDao theResourceSearchUrlDao, MatchUrlService theMatchUrlService, FhirContext theFhirContext){
return new ResourceSearchUrlSvc(thePersistenceContextProvider.getEntityManager(), theResourceSearchUrlDao, theMatchUrlService, theFhirContext);
}

@Bean
public ISearchUrlJobMaintenanceSvc searchUrlJobMaintenanceSvc(ResourceSearchUrlSvc theResourceSearchUrlSvc){
return new SearchUrlJobMaintenanceSvcImpl(theResourceSearchUrlSvc);
}
}
@@ -147,7 +147,6 @@ import java.util.StringTokenizer;
import java.util.stream.Collectors;

import static java.util.Objects.nonNull;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.left;
@@ -896,11 +895,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
return myContext.getResourceType(theResource);
}

protected ResourceTable updateEntityForDelete(RequestDetails theRequest, TransactionDetails theTransactionDetails, ResourceTable entity) {
Date updateTime = new Date();
return updateEntity(theRequest, null, entity, updateTime, true, true, theTransactionDetails, false, true);
}

@VisibleForTesting
public void setEntityManager(EntityManager theEntityManager) {
myEntityManager = theEntityManager;
@@ -60,6 +60,7 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.search.ResourceSearchUrlSvc;
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
@@ -192,6 +193,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Autowired
private UrlPartitioner myUrlPartitioner;

@Autowired
private ResourceSearchUrlSvc myResourceSearchUrlSvc;

@Override
protected HapiTransactionService getTransactionService() {
return myTransactionService;
@@ -413,6 +417,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

// Pre-cache the match URL
if (theMatchUrl != null) {
myResourceSearchUrlSvc.enforceMatchUrlResourceUniqueness(getResourceName(), theMatchUrl, jpaPid);
myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, getResourceName(), theMatchUrl, jpaPid);
}

@@ -727,6 +732,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return retVal;
}

protected ResourceTable updateEntityForDelete(RequestDetails theRequest, TransactionDetails theTransactionDetails, ResourceTable theEntity) {
myResourceSearchUrlSvc.deleteByResId(theEntity.getId());
Date updateTime = new Date();
return updateEntity(theRequest, null, theEntity, updateTime, true, true, theTransactionDetails, false, true);
}

private void validateDeleteEnabled() {
if (!getStorageSettings().isDeleteEnabled()) {
String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "deleteBlockedBecauseDisabled");
@@ -1751,10 +1762,20 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}

// Start

return doUpdateForUpdateOrPatch(theRequest, resourceId, theMatchUrl, thePerformIndexing, theForceUpdateVersion, resource, entity, update, theTransactionDetails);
}

@Override
protected DaoMethodOutcome doUpdateForUpdateOrPatch(RequestDetails theRequest, IIdType theResourceId, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, T theResource, IBasePersistedResource theEntity, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) {

// we stored a resource searchUrl at creation time to prevent resource duplication. Let's remove the entry on the
// first update but guard against unnecessary trips to the database on subsequent ones.
if(theEntity.getVersion() < 2){
myResourceSearchUrlSvc.deleteByResId((Long) theEntity.getPersistentId().getId());
}

return super.doUpdateForUpdateOrPatch(theRequest, theResourceId, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theResource, theEntity, theOperationType, theTransactionDetails);
}

/**
* Method for updating the historical version of the resource when a history version id is included in the request.
@@ -0,0 +1,22 @@
package ca.uhn.fhir.jpa.dao.data;


import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.Date;

public interface IResourceSearchUrlDao extends JpaRepository<ResourceSearchUrlEntity, Long>, IHapiFhirJpaRepository{

@Modifying
@Query("DELETE FROM ResourceSearchUrlEntity s WHERE (s.myCreatedTime < :cutoff)")
int deleteAllWhereCreatedBefore(@Param("cutoff") Date theCutoff);

@Modifying
@Query("DELETE FROM ResourceSearchUrlEntity s WHERE (s.myResourcePid = :resID)")
int deleteByResId(@Param("resID") long resId);

}
@@ -67,6 +67,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.SearchParamPresentEntity;
@@ -177,6 +178,7 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TagDefinition.class, requestPartitionId));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryProvenanceEntity.class, requestPartitionId));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTable.class, requestPartitionId));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceSearchUrlEntity.class, requestPartitionId));
int counterBefore = counter.get();
counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTable.class, requestPartitionId));
counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, PartitionEntity.class, requestPartitionId));
@@ -123,6 +123,18 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.addColumn("20230215.3", BulkExportJobEntity.JOB_ID)
.nullable()
.type(ColumnTypeEnum.STRING, UUID_LENGTH);


Builder.BuilderAddTableByColumns resSearchUrlTable = version.addTableByColumns("20230227.1","HFJ_RES_SEARCH_URL", "RES_SEARCH_URL");

resSearchUrlTable.addColumn( "RES_SEARCH_URL").nonNullable().type(ColumnTypeEnum.STRING, 768);
resSearchUrlTable.addColumn( "RES_ID").nonNullable().type(ColumnTypeEnum.LONG);

resSearchUrlTable.addColumn( "CREATED_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP);

resSearchUrlTable.addIndex("20230227.2", "IDX_RESSEARCHURL_RES").unique(false).withColumns("RES_ID");
resSearchUrlTable.addIndex("20230227.3", "IDX_RESSEARCHURL_TIME").unique(false).withColumns("CREATED_TIME");

}

protected void init640() {
@@ -0,0 +1,85 @@
package ca.uhn.fhir.jpa.search;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import javax.persistence.EntityManager;
import java.util.Date;

/**
* This service ensures uniqueness of resources during create or create-on-update by storing the resource searchUrl.
*/
@Transactional
@Service
public class ResourceSearchUrlSvc {
private static final Logger ourLog = LoggerFactory.getLogger(ResourceSearchUrlSvc.class);
private final EntityManager myEntityManager;

private final IResourceSearchUrlDao myResourceSearchUrlDao;

private final MatchUrlService myMatchUrlService;

private final FhirContext myFhirContext;

public ResourceSearchUrlSvc(EntityManager theEntityManager, IResourceSearchUrlDao theResourceSearchUrlDao, MatchUrlService theMatchUrlService, FhirContext theFhirContext) {
myEntityManager = theEntityManager;
myResourceSearchUrlDao = theResourceSearchUrlDao;
myMatchUrlService = theMatchUrlService;
myFhirContext = theFhirContext;
}

/**
* Perform removal of entries older than {@code theCutoffDate} since the create operations are done.
*/
public void deleteEntriesOlderThan(Date theCutoffDate) {
ourLog.info("About to delete SearchUrl which are older than {}", theCutoffDate);
int deletedCount = myResourceSearchUrlDao.deleteAllWhereCreatedBefore(theCutoffDate);
ourLog.info("Deleted {} SearchUrls", deletedCount);
}


/**
* Once a resource is updated or deleted, we can trust that future match checks will find the committed resource in the db.
* The use of the constraint table is done, and we can delete it to keep the table small.
*/
public void deleteByResId(long theResId){
myResourceSearchUrlDao.deleteByResId(theResId);
}

/**
* We store a record of match urls with res_id so a db constraint can catch simultaneous creates that slip through.
*/
public void enforceMatchUrlResourceUniqueness(String theResourceName, String theMatchUrl, JpaPid theResourcePersistentId) {
String canonicalizedUrlForStorage = createCanonicalizedUrlForStorage(theResourceName, theMatchUrl);

ResourceSearchUrlEntity searchUrlEntity = ResourceSearchUrlEntity.from(canonicalizedUrlForStorage, theResourcePersistentId.getId());
// calling dao.save performs a merge operation which implies a trip to
// the database to see if the resource exists. Since we don't need the check, we avoid the trip by calling em.persist.
myEntityManager.persist(searchUrlEntity);

}

/**
* Provides a sanitized matchUrl to circumvent ordering matters.
*/
private String createCanonicalizedUrlForStorage(String theResourceName, String theMatchUrl){

RuntimeResourceDefinition resourceDef = myFhirContext.getResourceDefinition(theResourceName);
SearchParameterMap matchUrlSearchParameterMap = myMatchUrlService.translateMatchUrl(theMatchUrl, resourceDef);

String canonicalizedMatchUrl = matchUrlSearchParameterMap.toNormalizedQueryString(myFhirContext);

return theResourceName + canonicalizedMatchUrl;
}


}
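For a concrete sense of what this service stores, the sketch below mirrors the new FhirResourceDaoR4CreateTest later in this commit; the DAO fields are assumed to be Spring-injected test fixtures, and the normalized value shown in the comment is the one that test asserts.

// Sketch only; myObservationDao and myResourceSearchUrlDao are injected as in the JPA tests.
Observation obs = new Observation();
obs.addIdentifier().setValue("20210427133226.4440+800");
DaoMethodOutcome outcome = myObservationDao.create(obs, "identifier=20210427133226.4440+800", new SystemRequestDetails());

// The guard row keys on resource type + normalized query string, so equivalent match URLs
// collapse to the same value: here "Observation?identifier=20210427133226.4440%2B800".
ResourceSearchUrlEntity row = myResourceSearchUrlDao.findAll().get(0);
ourLog.info("searchUrl={} resId={}", row.getSearchUrl(), row.getResourcePid());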
@@ -0,0 +1,64 @@
package ca.uhn.fhir.jpa.search;

import ca.uhn.fhir.jpa.api.svc.ISearchUrlJobMaintenanceSvc;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import org.apache.commons.lang3.time.DateUtils;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.Date;

/**
* The purpose of this service is to define and register a job that will clean up
* entries created by an instance of {@link ResourceSearchUrlSvc}.
*/
public class SearchUrlJobMaintenanceSvcImpl implements ISearchUrlJobMaintenanceSvc, IHasScheduledJobs {

private ResourceSearchUrlSvc myResourceSearchUrlSvc;

/**
* An hour at 3k resources/second is ~10M resources. That's easy to manage with deletes by age.
* We can shorten this if we have memory or storage pressure. MUST be longer than the longest
* transaction possible in order to work.
*/
public static final long OUR_CUTOFF_IN_MILLISECONDS = 1 * DateUtils.MILLIS_PER_HOUR;

public SearchUrlJobMaintenanceSvcImpl(ResourceSearchUrlSvc theResourceSearchUrlSvc) {
myResourceSearchUrlSvc = theResourceSearchUrlSvc;
}

@Override
public void removeStaleEntries() {
final Date cutoffDate = calculateCutoffDate();
myResourceSearchUrlSvc.deleteEntriesOlderThan(cutoffDate);
}

@Override
public void scheduleJobs(ISchedulerService theSchedulerService) {
ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
jobDetail.setId(SearchUrlMaintenanceJob.class.getName());
jobDetail.setJobClass(SearchUrlMaintenanceJob.class);
theSchedulerService.scheduleLocalJob(10 * DateUtils.MILLIS_PER_MINUTE, jobDetail);
}

private Date calculateCutoffDate() {
return new Date(System.currentTimeMillis() - OUR_CUTOFF_IN_MILLISECONDS);
}

public static class SearchUrlMaintenanceJob implements HapiJob{


@Autowired
private ISearchUrlJobMaintenanceSvc mySearchUrlJobMaintenanceSvc;

@Override
public void execute(JobExecutionContext theJobExecutionContext) throws JobExecutionException {
mySearchUrlJobMaintenanceSvc.removeStaleEntries();
}

}
}
@@ -0,0 +1,18 @@
package ca.uhn.fhir.jpa.util;

import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;

/**
* Utility class that provides a proxied entityManager. It can be directly injected or
* used as part of a bean creation process to provide a proxied entityManager through the constructor.
*/
public class PersistenceContextProvider {

@PersistenceContext
private EntityManager myEntityManager;

public EntityManager getEntityManager() {
return myEntityManager;
}
}
@@ -212,7 +212,6 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
@Autowired
private TestHSearchEventDispatcher myHSearchEventDispatcher;


@Mock
private IHSearchEventListener mySearchEventListener;

@@ -81,7 +81,6 @@ public class FhirResourceDaoR4TerminologyElasticsearchIT extends BaseJpaTest {
@Autowired
private IBulkDataExportJobSchedulingHelper myBulkDataScheduleHelper;


@BeforeEach
public void beforeEach() {
when(mySrd.getUserData().getOrDefault(MAKE_LOADING_VERSION_CURRENT, Boolean.TRUE)).thenReturn(Boolean.TRUE);
@@ -104,7 +104,6 @@ public class ValueSetExpansionR4ElasticsearchIT extends BaseJpaTest {
@Mock
private IValueSetConceptAccumulator myValueSetCodeAccumulator;


@BeforeEach
public void beforeEach() {
when(mySrd.getUserData().getOrDefault(MAKE_LOADING_VERSION_CURRENT, Boolean.TRUE)).thenReturn(Boolean.TRUE);
@@ -0,0 +1,70 @@
package ca.uhn.fhir.jpa.model.entity;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import java.util.Date;

@Entity
@Table(name = "HFJ_RES_SEARCH_URL",
indexes = {
@Index(name = "IDX_RESSEARCHURL_RES", columnList = "RES_ID"),
@Index(name = "IDX_RESSEARCHURL_TIME", columnList = "CREATED_TIME")
})
public class ResourceSearchUrlEntity {

public static final String RES_SEARCH_URL_COLUMN_NAME = "RES_SEARCH_URL";

public static final int RES_SEARCH_URL_LENGTH = 768;

@Id
@Column(name = RES_SEARCH_URL_COLUMN_NAME, length = RES_SEARCH_URL_LENGTH, nullable = false)
private String mySearchUrl;

@Column(name = "RES_ID", updatable = false, nullable = false)
private Long myResourcePid;

@Column(name = "CREATED_TIME", nullable = false)
@Temporal(TemporalType.TIMESTAMP)
private Date myCreatedTime;

public static ResourceSearchUrlEntity from(String theUrl, Long theId) {
return new ResourceSearchUrlEntity()
.setResourcePid(theId)
.setSearchUrl(theUrl)
.setCreatedTime(new Date());
}

public Long getResourcePid() {
return myResourcePid;
}

public ResourceSearchUrlEntity setResourcePid(Long theResourcePid) {
myResourcePid = theResourcePid;
return this;
}

public Date getCreatedTime() {
return myCreatedTime;
}

public ResourceSearchUrlEntity setCreatedTime(Date theCreatedTime) {
myCreatedTime = theCreatedTime;
return this;
}

public String getSearchUrl() {
return mySearchUrl;
}

public ResourceSearchUrlEntity setSearchUrl(String theSearchUrl) {
mySearchUrl = theSearchUrl;
return this;
}
}
@@ -212,6 +212,7 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest {
@Autowired
private ValidationSupportChain myJpaValidationSupportChain;


@RegisterExtension
private final PreventDanglingInterceptorsExtension myPreventDanglingInterceptorsExtension = new PreventDanglingInterceptorsExtension(()-> myInterceptorRegistry);

@@ -3566,7 +3566,7 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
assertEquals(10, myCaptureQueriesListener.countSelectQueries());
assertEquals(5, myCaptureQueriesListener.countUpdateQueries());
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(1, myCaptureQueriesListener.countDeleteQueries());
String unformattedSql = myCaptureQueriesListener.getUpdateQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(unformattedSql, stringContainsInOrder(
"SRC_PATH='Observation.performer'",
@@ -33,6 +33,18 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.exparity</groupId>
<artifactId>hamcrest-date</artifactId>
<version>2.0.7</version>
<exclusions>
<exclusion>
<artifactId>*</artifactId>
<groupId>*</groupId>
</exclusion>
</exclusions>
<scope>test</scope>
</dependency>
</dependencies>

<build>
@@ -66,7 +66,7 @@ public class ConsumeFilesStepR4Test extends BaseJpaR4Test {
assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(2, myCaptureQueriesListener.countDeleteQueries());
assertEquals(1, myCaptureQueriesListener.countCommits());
assertEquals(0, myCaptureQueriesListener.countRollbacks());

@@ -115,7 +115,7 @@ public class ConsumeFilesStepR4Test extends BaseJpaR4Test {
assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(2, myCaptureQueriesListener.logInsertQueries());
assertEquals(4, myCaptureQueriesListener.logUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(2, myCaptureQueriesListener.countDeleteQueries());
assertEquals(1, myCaptureQueriesListener.countCommits());
assertEquals(0, myCaptureQueriesListener.countRollbacks());

@@ -0,0 +1,278 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IPointcut;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.interceptor.UserRequestRetryVersionConflictsInterceptor;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.search.ResourceSearchUrlSvc;
import ca.uhn.fhir.jpa.search.SearchUrlJobMaintenanceSvcImpl;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.test.concurrency.PointcutLatch;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.Observation;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;

import static java.util.Arrays.asList;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.fail;

public class FhirResourceDaoR4ConcurrentCreateTest extends BaseJpaR4Test {

private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4ConcurrentCreateTest.class);

ThreadGaterPointcutLatch myThreadGaterPointcutLatchInterceptor;
UserRequestRetryVersionConflictsInterceptor myUserRequestRetryVersionConflictsInterceptor;
ResourceConcurrentSubmitterSvc myResourceConcurrentSubmitterSvc;

@Autowired
SearchUrlJobMaintenanceSvcImpl mySearchUrlJobMaintenanceSvc;

@Autowired
IResourceSearchUrlDao myResourceSearchUrlDao;

@Autowired
ResourceSearchUrlSvc myResourceSearchUrlSvc;

Callable<String> myResource;

@BeforeEach
public void beforeEach(){
myThreadGaterPointcutLatchInterceptor = new ThreadGaterPointcutLatch("gaterLatch");
myUserRequestRetryVersionConflictsInterceptor = new UserRequestRetryVersionConflictsInterceptor();

// this pointcut is AFTER the match url has resolved, but before commit.
myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, myThreadGaterPointcutLatchInterceptor);
myInterceptorRegistry.registerInterceptor(myUserRequestRetryVersionConflictsInterceptor);
myResourceConcurrentSubmitterSvc = new ResourceConcurrentSubmitterSvc();
myResource = buildResourceAndCreateCallable();

List<ResourceSearchUrlEntity> all = myResourceSearchUrlDao.findAll();
assertThat(all, hasSize(0));
}

@AfterEach
public void afterEach() {
myResourceConcurrentSubmitterSvc.shutDown();
}

@Override
@AfterEach
public void afterResetInterceptors() {
super.afterResetInterceptors();
myInterceptorRegistry.unregisterInterceptor(myThreadGaterPointcutLatchInterceptor);
myInterceptorRegistry.unregisterInterceptor(myUserRequestRetryVersionConflictsInterceptor);
}

@Test
public void testMultipleThreads_attemptingToCreatingTheSameResource_willCreateOnlyOneResource() throws InterruptedException, ExecutionException {
// given
final int numberOfThreadsAttemptingToCreateDuplicates = 2;
int expectedResourceCount = myResourceTableDao.findAll().size() + 1;

myThreadGaterPointcutLatchInterceptor.setExpectedCount(numberOfThreadsAttemptingToCreateDuplicates);

// when
// create a situation where multiple threads will try to create the same resource
for (int i = 0; i < numberOfThreadsAttemptingToCreateDuplicates; i++){
myResourceConcurrentSubmitterSvc.submitResource(myResource);
}

// let's wait for all executor threads to wait (block) at the pointcut
myThreadGaterPointcutLatchInterceptor.awaitExpected();

// we get here only if latch.countdown has reached 0, i.e. all executor threads have reached the pointcut,
// so notify them all to allow execution to proceed.
myThreadGaterPointcutLatchInterceptor.doNotifyAll();

List<String> errorList = myResourceConcurrentSubmitterSvc.waitForThreadsCompletionAndReturnErrors();

// then
assertThat(errorList, hasSize(0));
// red-green before the fix, the size was 'numberOfThreadsAttemptingToCreateDuplicates'
assertThat(myResourceTableDao.findAll(), hasSize(expectedResourceCount));

}

@Test
public void testRemoveStaleEntries_withNonStaleAndStaleEntries_willOnlyDeleteStaleEntries(){
// given
long tenMinutes = 10 * DateUtils.MILLIS_PER_HOUR;

Date tooOldBy10Minutes = cutOffTimeMinus(tenMinutes);
ResourceSearchUrlEntity tooOld1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", 1l).setCreatedTime(tooOldBy10Minutes);
ResourceSearchUrlEntity tooOld2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", 2l).setCreatedTime(tooOldBy10Minutes);

Date tooNewBy10Minutes = cutOffTimePlus(tenMinutes);
ResourceSearchUrlEntity tooNew1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.446", 3l).setCreatedTime(tooNewBy10Minutes);
ResourceSearchUrlEntity tooNew2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.447", 4l).setCreatedTime(tooNewBy10Minutes);

myResourceSearchUrlDao.saveAll(asList(tooOld1, tooOld2, tooNew1, tooNew2));

// when
mySearchUrlJobMaintenanceSvc.removeStaleEntries();

// then
List<Long> resourcesPids = getStoredResourceSearchUrlEntitiesPids();
assertThat(resourcesPids, containsInAnyOrder(3l, 4l));
}

@Test
public void testRemoveStaleEntries_withNoEntries_willNotGenerateExceptions(){

mySearchUrlJobMaintenanceSvc.removeStaleEntries();

}

@Test
public void testMethodDeleteByResId_withEntries_willDeleteTheEntryIfExists(){

// given
long nonExistentResourceId = 99l;

ResourceSearchUrlEntity entry1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", 1l);
ResourceSearchUrlEntity entry2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", 2l);
myResourceSearchUrlDao.saveAll(asList(entry1, entry2));

// when
myResourceSearchUrlSvc.deleteByResId(entry1.getResourcePid());
myResourceSearchUrlSvc.deleteByResId(nonExistentResourceId);

// then
List<Long> resourcesPids = getStoredResourceSearchUrlEntitiesPids();
assertThat(resourcesPids, containsInAnyOrder(2l));

}

private List<Long> getStoredResourceSearchUrlEntitiesPids(){
List<ResourceSearchUrlEntity> remainingSearchUrlEntities = myResourceSearchUrlDao.findAll();
return remainingSearchUrlEntities.stream().map(ResourceSearchUrlEntity::getResourcePid).collect(Collectors.toList());
}

private Date cutOffTimePlus(long theAdjustment) {
long currentTimeMillis = System.currentTimeMillis();
long offset = currentTimeMillis - SearchUrlJobMaintenanceSvcImpl.OUR_CUTOFF_IN_MILLISECONDS + theAdjustment;
return new Date(offset);
}

private Date cutOffTimeMinus(long theAdjustment) {
return cutOffTimePlus(-theAdjustment);
}

private Callable<String> buildResourceAndCreateCallable() {
return () -> {

Identifier identifier = new Identifier().setValue("20210427133226.444+0800");
Observation obs = new Observation().addIdentifier(identifier);

RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setRetry(true);
requestDetails.setMaxRetries(3);

try {
ourLog.info("Creating resource");
DaoMethodOutcome outcome = myObservationDao.create(obs, "identifier=20210427133226.444+0800", requestDetails);
} catch (Throwable t) {
ourLog.info("create threw an exception {}", t.getMessage());
fail();
}
return null;
};

}

/**
* PointcutLatch that will force an executing thread to wait (block) until being notified.
*
* This class can be used to replicate race conditions. It provides a mechanism to block a predefined number of
* executing threads at a pointcut. When all expected threads have reached the pointcut, the race condition is
* created by invoking the {@link #doNotifyAll()} method that will mark all waiting threads as being ready for execution.
*/
public static class ThreadGaterPointcutLatch extends PointcutLatch {
public ThreadGaterPointcutLatch(String theName) {
super(theName);
}

public void invoke(IPointcut thePointcut, HookParams theArgs) {
doInvoke(thePointcut, theArgs);
}

private synchronized void doInvoke(IPointcut thePointcut, HookParams theArgs){
super.invoke(thePointcut, theArgs);
try {
String threadName = Thread.currentThread().getName();
ourLog.info(String.format("I'm thread %s and i'll going to sleep", threadName));
wait(10*1000);
ourLog.info(String.format("I'm thread %s and i'm waking up", threadName));
} catch (InterruptedException theE) {
throw new RuntimeException(theE);
}
}

public synchronized void doNotifyAll(){
notifyAll();
}

}

public static class ResourceConcurrentSubmitterSvc{
ExecutorService myPool;
List<Future<String>> myFutures = new ArrayList<>();

public List<String> waitForThreadsCompletionAndReturnErrors() throws ExecutionException, InterruptedException {

List<String> errorList = new ArrayList<>();

for (Future<String> next : myFutures) {
String nextError = next.get();
if (StringUtils.isNotBlank(nextError)) {
errorList.add(nextError);
}
}
return errorList;
}

private ExecutorService getExecutorServicePool(){
if(Objects.isNull(myPool)){
int maxThreadsUsed = TestR4Config.ourMaxThreads - 1;
myPool = Executors.newFixedThreadPool(Math.min(maxThreadsUsed, 5));
}

return myPool;
}

public void shutDown(){
getExecutorServicePool().shutdown();
}

public void submitResource(Callable<String> theResourceRunnable) {
Future<String> future = getExecutorServicePool().submit(theResourceRunnable);
myFutures.add(future);
}
}

}
@@ -9,13 +9,14 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantityNormalized;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
@@ -27,6 +28,7 @@ import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.ClasspathUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.exparity.hamcrest.date.DateMatchers;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
@@ -64,11 +66,15 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;

import static java.time.temporal.ChronoUnit.SECONDS;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.matchesPattern;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -393,6 +399,29 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
}
}

@Test
public void testCreateResource_withConditionalCreate_willAddSearchUrlEntity(){
// given
String identifierCode = "20210427133226.4440+800";
String matchUrl = "identifier=" + identifierCode;
Observation obs = new Observation();
obs.addIdentifier().setValue(identifierCode);
// when
DaoMethodOutcome outcome = myObservationDao.create(obs, matchUrl, new SystemRequestDetails());

// then
Long expectedResId = outcome.getId().getIdPartAsLong();
String expectedNormalizedMatchUrl = obs.fhirType() + "?" + StringUtils.replace(matchUrl, "+", "%2B");

assertTrue(outcome.getCreated());
ResourceSearchUrlEntity searchUrlEntity = myResourceSearchUrlDao.findAll().get(0);
assertThat(searchUrlEntity, is(notNullValue()) );
assertThat(searchUrlEntity.getResourcePid(), equalTo(expectedResId));
assertThat(searchUrlEntity.getCreatedTime(), DateMatchers.within(1, SECONDS, new Date()));
assertThat(searchUrlEntity.getSearchUrl(), equalTo(expectedNormalizedMatchUrl));

}

@Test
public void testCreateResourceWithKoreanText() throws IOException {
String input = ClasspathUtil.loadResource("/r4/bug832-korean-text.xml");
@@ -2,15 +2,18 @@ package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.AfterEach;
@@ -18,6 +21,10 @@ import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
@@ -76,7 +83,6 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {

}


@Test
public void testDeleteDisabled() {
myStorageSettings.setDeleteEnabled(false);
@@ -93,7 +99,6 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {
}
}


@Test
public void testDeleteCircularReferenceInTransaction() {

@@ -174,7 +179,6 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {

}


@Test
public void testResourceIsConsideredDeletedIfOnlyResourceTableEntryIsDeleted() {

@@ -216,13 +220,21 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {

}


@Test
public void testDeleteIgnoreReferentialIntegrityForPaths() {

public void testDeleteResourceCreatedWithConditionalUrl_willRemoveEntryInSearchUrlTable() {
String identifierCode = "20210427133226.4440+800";
String matchUrl = "identifier=20210427133226.4440+800";
Observation obs = new Observation();
obs.addIdentifier().setValue(identifierCode);
IIdType firstObservationId = myObservationDao.create(obs, matchUrl, new SystemRequestDetails()).getId();
assertThat(myResourceSearchUrlDao.findAll(), hasSize(1));

// when
myObservationDao.delete(obs.getIdElement(), mySrd);
DaoMethodOutcome daoMethodOutcome = myObservationDao.create(obs, matchUrl, new SystemRequestDetails());

// then
assertThat(daoMethodOutcome.getCreated(), equalTo(Boolean.TRUE));
assertThat(firstObservationId.getIdPart(), not(equalTo(daoMethodOutcome.getId())));
}


}
@ -133,7 +133,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
|
||||
assertThat(myCaptureQueriesListener.getInsertQueriesForCurrentThread(), empty());
|
||||
assertThat(myCaptureQueriesListener.getDeleteQueriesForCurrentThread(), empty());
|
||||
assertEquals(1, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
|
||||
}
|
||||
|
||||
|
||||
|
@ -159,7 +159,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
|
||||
assertEquals(1, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
|
||||
assertEquals(1, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -178,7 +178,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.clear();
|
||||
group = updateGroup(group, patientList.subList(initialPatientsCount, allPatientsCount));
|
||||
|
||||
assertQueryCount(10, 1, 2, 0);
|
||||
assertQueryCount(10, 1, 2, 1);
|
||||
|
||||
assertEquals(allPatientsCount, group.getMember().size());
|
||||
|
||||
|
@ -200,7 +200,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
group = updateGroup(group, Collections.emptyList());
|
||||
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertQueryCount(5, 1, 2, 0);
|
||||
assertQueryCount(5, 1, 2, 1);
|
||||
|
||||
assertEquals(30, group.getMember().size());
|
||||
|
||||
|
@ -237,7 +237,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
|
||||
assertEquals(1, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
|
||||
assertEquals(1, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
|
||||
}
|
||||
|
||||
|
||||
|
@ -543,7 +543,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
|
||||
assertEquals(1, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
|
||||
assertEquals(1, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
|
||||
|
||||
// Third time (caches all loaded by now)
|
||||
|
||||
|
@ -1426,7 +1426,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
assertEquals(2, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
assertEquals(3, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
* Third time with mass ingestion mode enabled
|
||||
|
@ -1442,7 +1442,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
assertEquals(2, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
assertEquals(1, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
}
|
||||
|
||||
|
@ -1509,7 +1509,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
assertEquals(7, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
assertEquals(3, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
* Third time with mass ingestion mode enabled
|
||||
|
@ -1525,7 +1525,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
assertEquals(1, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
}
|
||||
|
||||
|
@ -1785,7 +1785,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
myCaptureQueriesListener.logInsertQueries();
|
||||
assertEquals(40, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(45, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
@ -1803,7 +1803,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
|
||||
myCaptureQueriesListener.logUpdateQueries();
|
||||
assertEquals(8, myCaptureQueriesListener.countUpdateQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
assertEquals(4, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
/*
|
||||
* Third time with mass ingestion mode enabled
|
||||
|
@ -1863,8 +1863,9 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
|
||||
myCaptureQueriesListener.clear();
|
||||
mySystemDao.transaction(mySrd, bundleCreator.get());
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(9, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
runInTransaction(() -> {
|
||||
|
@ -1926,7 +1927,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
mySystemDao.transaction(mySrd, bundleCreator.get());
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
|
||||
assertEquals(8, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(9, myCaptureQueriesListener.countInsertQueries());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
|
||||
|
||||
runInTransaction(() -> {
|
||||
|
@ -2040,7 +2041,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
|
||||
|
||||
}
|
||||
|
@ -2834,7 +2835,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.clear();
|
||||
mySystemDao.transaction(new SystemRequestDetails(), supplier.get());
|
||||
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(28, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(30, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
|
||||
|
@ -2844,7 +2845,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(7, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
assertEquals(3, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
}
|
||||
|
||||
|
||||
|
@ -2875,7 +2876,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
mySystemDao.transaction(new SystemRequestDetails(), loadResourceFromClasspath(Bundle.class, "r4/transaction-perf-bundle.json"));
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(123, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(125, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
|
||||
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
|
@@ -2887,7 +2888,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
-assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+assertEquals(7, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

}
@@ -2908,7 +2909,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myObservationDao.delete(idDt, mySrd);

// then
-assertQueryCount(3, 1, 1, 1);
+assertQueryCount(3, 1, 1, 2);
}

private void printQueryCount() {
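Taken together, the adjusted counts in FhirResourceDaoR4QueryCountTest appear to reflect the new search-URL bookkeeping (backed by ResourceSearchUrlEntity): a conditional create writes one extra row per resource, so the insert counts rise, and updates or deletes clean that row up again, so the delete counts move from 0 to 1 and the last column of assertQueryCount from 1 to 2. A minimal sketch of the kind of check involved, not part of the commit; it assumes the same JPA test harness these tests already use (BaseJpaR4Test with myObservationDao and myCaptureQueriesListener), and the class and method names below are illustrative only:

// Illustrative sketch, not part of the diff.
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import org.hl7.fhir.r4.model.Observation;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertTrue;

public class ConditionalCreateQueryCountSketchTest extends BaseJpaR4Test {

	@Test
	public void conditionalCreateAddsOneSearchUrlInsert() {
		Observation obs = new Observation();
		obs.addIdentifier().setValue("sketch-identifier");

		myCaptureQueriesListener.clear();
		// Conditional create: the match URL below is what gets recorded in the search-URL table.
		myObservationDao.create(obs, "identifier=sketch-identifier", new SystemRequestDetails());

		// Exact totals depend on indexing settings; the point is that one of the
		// INSERT statements now targets the search-URL table.
		assertTrue(myCaptureQueriesListener.countInsertQueriesForCurrentThread() >= 2);
	}
}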
@@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
@@ -1146,7 +1146,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {

assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(3, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
-assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
}

@Test
@@ -1216,7 +1216,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
-assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
runInTransaction(() -> {
assertEquals(1, myResourceTableDao.count());
@@ -1278,7 +1278,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
pt.getNameFirstRep().addGiven("GIVEN1C");
myPatientDao.update(pt);

-assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
}
@@ -1307,7 +1307,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
pt.addName().setFamily("FAMILY2");
myPatientDao.update(pt);

-assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); // Add an entry to HFJ_RES_VER
assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); // Update SPIDX_STRING and HFJ_RESOURCE
@@ -1355,7 +1355,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
*/
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
-assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); // Add an entry to HFJ_RES_VER
assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); // Update SPIDX_STRING and HFJ_RESOURCE
@@ -1413,7 +1413,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
pt.getManagingOrganization().setReference(orgId2.getValue());
myPatientDao.update(pt);

-assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); // Add an entry to HFJ_RES_VER
assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); // Update SPIDX_STRING and HFJ_RESOURCE
@@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
@@ -26,6 +27,7 @@ import org.hl7.fhir.r4.model.ContactPoint;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.Meta;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Resource;
@@ -46,6 +48,7 @@ import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.matchesPattern;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -592,6 +595,24 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {

}

+@Test
+public void testUpdateResourceCreatedWithConditionalUrl_willRemoveEntryInSearchUrlTable(){
+String identifierCode = "20210427133226.4440+800";
+String matchUrl = "identifier=20210427133226.4440+800";
+Observation obs = new Observation();
+obs.addIdentifier().setValue(identifierCode);
+myObservationDao.create(obs, matchUrl, new SystemRequestDetails());
+assertThat(myResourceSearchUrlDao.findAll(), hasSize(1));
+
+// when
+obs.setStatus(Observation.ObservationStatus.CORRECTED);
+myObservationDao.update(obs, mySrd);
+
+// then
+assertThat(myResourceSearchUrlDao.findAll(), hasSize(0));
+
+}
+
@Test
public void testUpdateWithoutId() {
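The added test covers the update path. A sibling check for the delete path would look almost identical; the sketch below is not part of the diff. It assumes that deleting the resource also clears its search-URL row (which the assertQueryCount change from (3, 1, 1, 1) to (3, 1, 1, 2) earlier in the commit suggests) and that it sits inside the same test class, reusing its fixtures and imports (IIdType included):

@Test
public void testDeleteResourceCreatedWithConditionalUrl_willRemoveEntryInSearchUrlTable() {
	// Hypothetical sibling of the update-path test above; name and scenario are illustrative.
	Observation obs = new Observation();
	obs.addIdentifier().setValue("20210427133226.4440+800");
	IIdType id = myObservationDao.create(obs, "identifier=20210427133226.4440+800", new SystemRequestDetails()).getId().toUnqualifiedVersionless();
	assertThat(myResourceSearchUrlDao.findAll(), hasSize(1));

	// when
	myObservationDao.delete(id, mySrd);

	// then
	assertThat(myResourceSearchUrlDao.findAll(), hasSize(0));
}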
@@ -2766,7 +2766,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false), containsString("resourcein0_.HASH_SYS_AND_VALUE='-4132452001562191669' and (resourcein0_.PARTITION_ID in ('1'))"));
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
-assertEquals(40, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+assertEquals(45, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@@ -2784,7 +2784,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(8, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
-assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+assertEquals(4, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

/*
* Third time with mass ingestion mode enabled
@@ -224,6 +224,7 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Test
}
}).andThen()
.allow().createConditional().resourcesOfType("Patient").andThen()
.allow().read().resourcesOfType("Patient").withAnyId().andThen()
.allow().transaction().withAnyOperation().andApplyNormalRules().andThen()
.build();
}
@@ -379,7 +379,6 @@ public abstract class BaseJpaR4BTest extends BaseJpaTest implements ITestDataBuilder
@Autowired
private IBulkDataExportJobSchedulingHelper myBulkDataSchedulerHelper;


@Override
public IIdType doCreateResource(IBaseResource theResource) {
IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass());
@@ -53,6 +53,7 @@ import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamQuantityDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamQuantityNormalizedDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao;
import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
@@ -103,7 +104,6 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.BasePagingProvider;
import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
import ca.uhn.fhir.storage.test.DaoTestDataBuilder;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.validation.FhirValidator;
@@ -517,6 +517,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuilder
private PerformanceTracingLoggingInterceptor myPerformanceTracingLoggingInterceptor;
@Autowired
private IBulkDataExportJobSchedulingHelper myBulkDataScheduleHelper;
+@Autowired
+protected IResourceSearchUrlDao myResourceSearchUrlDao;

@RegisterExtension
private final PreventDanglingInterceptorsExtension myPreventDanglingInterceptorsExtension = new PreventDanglingInterceptorsExtension(()-> myInterceptorRegistry);
@@ -156,7 +156,6 @@ public abstract class BaseValueSetHSearchExpansionR4Test extends BaseJpaTest {
@Mock
private IValueSetConceptAccumulator myValueSetCodeAccumulator;


@BeforeEach
public void beforeEach() {
when(mySrd.getUserData().getOrDefault(MAKE_LOADING_VERSION_CURRENT, Boolean.TRUE)).thenReturn(Boolean.TRUE);
@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
+import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import org.hibernate.HibernateException;
import org.hibernate.PersistentObjectException;
@@ -10,13 +11,15 @@ import org.hibernate.exception.ConstraintViolationException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.dao.DataAccessException;
+import org.springframework.dao.DataIntegrityViolationException;

import javax.persistence.PersistenceException;
import java.sql.SQLException;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

public class HapiFhirHibernateJpaDialectTest {

@@ -46,6 +49,13 @@ public class HapiFhirHibernateJpaDialectTest {
assertThat(e.getMessage(), containsString("The operation has failed with a version constraint failure"));
}

+try {
+mySvc.convertHibernateAccessException(new ConstraintViolationException("this is a message", new SQLException("reason"), ResourceSearchUrlEntity.RES_SEARCH_URL_COLUMN_NAME));
+fail();
+} catch (DataIntegrityViolationException e) {
+assertThat(e.getMessage(), containsString(ResourceSearchUrlEntity.RES_SEARCH_URL_COLUMN_NAME));
+}
+
outcome = mySvc.convertHibernateAccessException(new HibernateException("this is a message"));
assertThat(outcome.getMessage(), containsString("HibernateException: this is a message"));
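This test exercises the new dialect branch: a constraint violation on the search-URL unique index stays a plain DataIntegrityViolationException (carrying the constraint name) instead of being reworded into a version-conflict error, presumably so calling code can recognize the "two threads raced on the same conditional URL" case. A condensed sketch of how such a check could look, using the same message test the assertion above performs; the helper class and method names are illustrative and not part of HAPI FHIR:

import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import org.springframework.dao.DataIntegrityViolationException;

// Hypothetical helper: detects the duplicate-conditional-create constraint violation.
final class ConditionalCreateRaceDetector {

	static boolean isDuplicateConditionalCreate(DataIntegrityViolationException theException) {
		String message = theException.getMessage();
		return message != null && message.contains(ResourceSearchUrlEntity.RES_SEARCH_URL_COLUMN_NAME);
	}
}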
@@ -33,6 +33,7 @@ import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.interceptor.consent.ConsentInterceptor;
import com.google.common.collect.Lists;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
@@ -52,9 +53,13 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

+import static java.util.Objects.isNull;
+import static java.util.Objects.nonNull;
+import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -136,9 +141,12 @@ public class AuthorizationInterceptor implements IRuleApplier {
theRequestDetails.getUserData().put(myRequestRuleListKey, rules);
}
Set<AuthorizationFlagsEnum> flags = getFlags();
-ourLog.trace("Applying {} rules to render an auth decision for operation {}, theInputResource type={}, theOutputResource type={} ", rules.size(), theOperation,
-((theInputResource != null) && (theInputResource.getIdElement() != null)) ? theInputResource.getIdElement().getResourceType() : "",
-((theOutputResource != null) && (theOutputResource.getIdElement() != null)) ? theOutputResource.getIdElement().getResourceType() : "");
+
+ourLog.trace("Applying {} rules to render an auth decision for operation {}, theInputResource type={}, theOutputResource type={}, thePointcut={} ",
+rules.size(),
+getPointcutNameOrEmpty(thePointcut),
+getResourceTypeOrEmpty(theInputResource),
+getResourceTypeOrEmpty(theOutputResource));

Verdict verdict = null;
for (IAuthRule nextRule : rules) {
@@ -558,4 +566,26 @@ public class AuthorizationInterceptor implements IRuleApplier {

}

+private Object getPointcutNameOrEmpty(Pointcut thePointcut) {
+return nonNull(thePointcut) ? thePointcut.name() : EMPTY;
+}
+
+private String getResourceTypeOrEmpty(IBaseResource theResource){
+String retVal = StringUtils.EMPTY;
+
+if(isNull(theResource)){
+return retVal;
+}
+
+if(isNull(theResource.getIdElement())){
+return retVal;
+}
+
+if(isNull(theResource.getIdElement().getResourceType())){
+return retVal;
+}
+
+return theResource.getIdElement().getResourceType();
+}
+
}
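As a side note on the helpers above: getResourceTypeOrEmpty spells the null checks out one by one, which keeps each guard explicit. The same behaviour could also be written as a single Optional chain; a purely illustrative alternative, not part of the commit:

import java.util.Optional;
import org.hl7.fhir.instance.model.api.IBaseResource;

// Equivalent behaviour to getResourceTypeOrEmpty(): the resource type, or "" when the
// resource, its id element, or the resource type is null.
final class ResourceTypeHelper {

	static String resourceTypeOrEmpty(IBaseResource theResource) {
		return Optional.ofNullable(theResource)
				.map(IBaseResource::getIdElement)
				.map(id -> id.getResourceType())
				.orElse("");
	}
}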
@@ -0,0 +1,5 @@
+package ca.uhn.fhir.jpa.api.svc;
+
+public interface ISearchUrlJobMaintenanceSvc {
+void removeStaleEntries();
+}
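The new interface only defines the maintenance hook; the scheduling that drives removeStaleEntries() is wired up elsewhere and is not shown in this part of the diff. One possible shape of a caller, as a sketch only — the class name and trigger mechanism below are assumptions, not the commit's actual wiring:

import ca.uhn.fhir.jpa.api.svc.ISearchUrlJobMaintenanceSvc;

// Hypothetical periodic job that purges stale search-URL rows.
public class SearchUrlMaintenanceJobSketch {

	private final ISearchUrlJobMaintenanceSvc mySearchUrlJobMaintenanceSvc;

	public SearchUrlMaintenanceJobSketch(ISearchUrlJobMaintenanceSvc theSearchUrlJobMaintenanceSvc) {
		mySearchUrlJobMaintenanceSvc = theSearchUrlJobMaintenanceSvc;
	}

	// Invoked on a schedule (e.g. by Quartz or a Spring TaskScheduler).
	public void run() {
		mySearchUrlJobMaintenanceSvc.removeStaleEntries();
	}
}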