Merging master into working branch.

# Conflicts:
#	hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java
#	hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java

commit 67d24eedff
@@ -182,4 +182,13 @@ public class TestUtil {
      return defaultString(theString).replace("\r", "");
   }

   /**
    * <b>THIS IS FOR UNIT TESTS ONLY - DO NOT CALL THIS METHOD FROM USER CODE</b>
    * <p>
    * Strip \r chars from a string to account for line ending platform differences
    */
   public static String stripWhitespace(String theString) {
      return stripReturns(theString).replace(" ", "");
   }

}
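The stripWhitespace() helper added above is intended for test assertions that should not be sensitive to platform line endings or incidental spacing. A minimal usage sketch follows; it is hypothetical test code, and the parser call and variable names (myFhirContext, patient) are illustrative rather than part of this commit:

   // Compare serialized output while ignoring \r characters and spaces
   String expected = TestUtil.stripWhitespace("{\"resourceType\": \"Patient\"}");
   String actual = TestUtil.stripWhitespace(myFhirContext.newJsonParser().encodeResourceToString(patient));
   assertEquals(expected, actual);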
@@ -14,6 +14,10 @@ import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
import ca.uhn.fhir.jpa.search.cache.DatabaseSearchResultCacheSvcImpl;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;
import ca.uhn.fhir.jpa.subscription.dbmatcher.CompositeInMemoryDaoSubscriptionMatcher;

@@ -143,6 +147,16 @@ public abstract class BaseConfig implements SchedulingConfigurer {
      return new BinaryStorageInterceptor();
   }

   @Bean
   public ISearchCacheSvc searchCacheSvc() {
      return new DatabaseSearchCacheSvcImpl();
   }

   @Bean
   public ISearchResultCacheSvc searchResultCacheSvc() {
      return new DatabaseSearchResultCacheSvcImpl();
   }

   @Bean
   public TaskScheduler taskScheduler() {
      ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler();
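The ISearchCacheSvc and ISearchResultCacheSvc interfaces wired up as beans above are not included in this excerpt. Based purely on the call sites that appear later in this diff, a minimal sketch might look like the following; the method set is inferred from usage, the exact signatures are assumptions, and the two interfaces would of course live in separate files:

   // Sketch only - inferred from call sites in this commit, not the actual source.
   package ca.uhn.fhir.jpa.search.cache;

   import ca.uhn.fhir.jpa.entity.Search;
   import java.util.Collection;
   import java.util.Date;
   import java.util.List;
   import java.util.Optional;

   public interface ISearchCacheSvc {
      Search save(Search theSearch);
      Optional<Search> fetchByUuid(String theUuid);
      Optional<Search> tryToMarkSearchAsInProgress(Search theSearch);
      Collection<Search> findCandidatesForReuse(String theResourceType, String theQueryString, int theQueryStringHash, Date theCreatedAfter);
      void updateSearchLastReturned(Search theSearch, Date theDate);
      void pollForStaleSearchesAndDeleteThem();
      void flushLastUpdated();
   }

   public interface ISearchResultCacheSvc {
      List<Long> fetchResultPids(Search theSearch, int theFrom, int theTo);
      List<Long> fetchAllResultPids(Search theSearch);
      void storeResults(Search theSearch, List<Long> theSyncedPids, List<Long> theUnsyncedPids);
   }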
@@ -19,6 +19,8 @@ import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
import ca.uhn.fhir.jpa.searchparam.extractor.LogicalReferenceHelper;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;

@@ -156,7 +158,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
   @Autowired
   private PlatformTransactionManager myPlatformTransactionManager;
   @Autowired
   private ISearchDao mySearchDao;
   private ISearchCacheSvc mySearchCacheSvc;
   @Autowired
   private ISearchResultCacheSvc mySearchResultCacheSvc;
   @Autowired
   private ISearchParamPresenceSvc mySearchParamPresenceSvc;
   @Autowired

@@ -463,7 +467,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
         }
      }

      search = mySearchDao.save(search);
      search = mySearchCacheSvc.save(search);

      return new PersistedJpaBundleProvider(theRequest, search.getUuid(), this);
   }

@@ -489,7 +493,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
      theProvider.setContext(getContext());
      theProvider.setEntityManager(myEntityManager);
      theProvider.setPlatformTransactionManager(myPlatformTransactionManager);
      theProvider.setSearchDao(mySearchDao);
      theProvider.setSearchCacheSvc(mySearchCacheSvc);
      theProvider.setSearchCoordinatorSvc(mySearchCoordinatorSvc);
      theProvider.setInterceptorBroadcaster(myInterceptorBroadcaster);
   }
@@ -54,7 +54,6 @@ import ca.uhn.fhir.validation.*;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.r4.model.InstantType;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

@@ -72,6 +71,7 @@ import javax.annotation.PostConstruct;
import javax.persistence.NoResultException;
import javax.persistence.TypedQuery;
import javax.servlet.http.HttpServletResponse;
import javax.validation.constraints.NotNull;
import java.io.IOException;
import java.util.*;
@@ -44,7 +44,10 @@ import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.servlet.http.HttpServletResponse;
import java.util.*;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Note that this interface is not considered a stable interface. While it is possible to build applications
@@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.util.ExpungeOptions;
import ca.uhn.fhir.jpa.util.ExpungeOutcome;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import com.google.common.annotations.Beta;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -10,6 +10,7 @@ import org.springframework.data.repository.query.Param;

import java.util.Collection;
import java.util.Date;
import java.util.Optional;

/*
 * #%L

@@ -33,15 +34,12 @@ import java.util.Date;

public interface ISearchDao extends JpaRepository<Search, Long> {

   @Query("SELECT s FROM Search s WHERE s.myUuid = :uuid")
   Search findByUuid(@Param("uuid") String theUuid);
   @Query("SELECT s FROM Search s LEFT OUTER JOIN FETCH s.myIncludes WHERE s.myUuid = :uuid")
   Optional<Search> findByUuidAndFetchIncludes(@Param("uuid") String theUuid);

   @Query("SELECT s.myId FROM Search s WHERE s.mySearchLastReturned < :cutoff")
   Slice<Long> findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, Pageable thePage);

   // @SqlQuery("SELECT s FROM Search s WHERE s.myCreated < :cutoff")
   // public Collection<Search> findWhereCreatedBefore(@Param("cutoff") Date theCutoff);

   @Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND s.myCreated > :cutoff AND s.myDeleted = false")
   Collection<Search> find(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);
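The repository changes above replace the plain findByUuid() lookup with an Optional-returning variant that fetch-joins the search includes, and add a hash-based candidate query used for cache reuse. The cache service implementation that wraps these methods is not fully visible in this excerpt; a plausible, purely hypothetical sketch of how it could delegate to the DAO (signatures follow the repository methods above):

   // Hypothetical sketch - not shown in this diff; delegates to the DAO methods above.
   @Override
   public Optional<Search> fetchByUuid(String theUuid) {
      // Fetch-join the includes so callers can use them outside the loading transaction
      return mySearchDao.findByUuidAndFetchIncludes(theUuid);
   }

   @Override
   public Collection<Search> findCandidatesForReuse(String theResourceType, String theQueryString, int theQueryStringHash, Date theCreatedAfter) {
      return mySearchDao.find(theResourceType, theQueryStringHash, theCreatedAfter);
   }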
@@ -20,13 +20,12 @@ package ca.uhn.fhir.jpa.dao.data;
 * #L%
 */

import ca.uhn.fhir.jpa.entity.SearchInclude;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import ca.uhn.fhir.jpa.entity.SearchInclude;

public interface ISearchIncludeDao extends JpaRepository<SearchInclude, Long> {

   @Modifying
@@ -1,8 +1,6 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchResult;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;

@@ -10,9 +8,7 @@ import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.Iterator;
import java.util.List;
import java.util.Set;

/*
 * #%L

@@ -36,19 +32,15 @@ import java.util.Set;

public interface ISearchResultDao extends JpaRepository<SearchResult, Long> {

   @Query(value="SELECT r.myResourcePid FROM SearchResult r WHERE r.mySearch = :search ORDER BY r.myOrder ASC")
   Page<Long> findWithSearchUuid(@Param("search") Search theSearch, Pageable thePage);
   @Query(value="SELECT r.myResourcePid FROM SearchResult r WHERE r.mySearchPid = :search ORDER BY r.myOrder ASC")
   Slice<Long> findWithSearchPid(@Param("search") Long theSearchPid, Pageable thePage);

   @Query(value="SELECT r.myResourcePid FROM SearchResult r WHERE r.mySearch = :search")
   List<Long> findWithSearchUuidOrderIndependent(@Param("search") Search theSearch);
   @Query(value="SELECT r.myResourcePid FROM SearchResult r WHERE r.mySearchPid = :search")
   List<Long> findWithSearchPidOrderIndependent(@Param("search") Long theSearchPid);

   @Query(value="SELECT r.myId FROM SearchResult r WHERE r.mySearchPid = :search")
   Slice<Long> findForSearch(Pageable thePage, @Param("search") Long theSearchPid);

   @Modifying
   @Query("DELETE FROM SearchResult s WHERE s.myResourcePid IN :ids")
   void deleteByResourceIds(@Param("ids") List<Long> theContent);

   @Modifying
   @Query("DELETE FROM SearchResult s WHERE s.myId IN :ids")
   void deleteByIds(@Param("ids") List<Long> theContent);
@@ -84,7 +84,6 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
   private void expungeDeletedResources() {
      Slice<Long> resourceIds = findHistoricalVersionsOfDeletedResources();

      deleteSearchResultCacheEntries(resourceIds);
      deleteHistoricalVersions(resourceIds);
      if (expungeLimitReached()) {
         return;

@@ -123,10 +122,6 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
      myPartitionRunner.runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeHistoricalVersionsOfIds(myRequestDetails, partition, myRemainingCount));
   }

   private void deleteSearchResultCacheEntries(Slice<Long> theResourceIds) {
      myPartitionRunner.runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.deleteByResourceIdPartitions(partition));
   }

   private ExpungeOutcome expungeOutcome() {
      return new ExpungeOutcome().setDeletedCount(myExpungeOptions.getLimit() - myRemainingCount.get());
   }
@@ -37,7 +37,5 @@ public interface IResourceExpungeService {

   void expungeHistoricalVersionsOfIds(RequestDetails theRequestDetails, List<Long> thePartition, AtomicInteger theRemainingCount);

   void deleteByResourceIdPartitions(List<Long> thePartition);

   void deleteAllSearchParams(Long theResourceId);
}
@@ -58,8 +58,6 @@ class ResourceExpungeService implements IResourceExpungeService {
   @Autowired
   private IResourceTableDao myResourceTableDao;
   @Autowired
   private ISearchResultDao mySearchResultDao;
   @Autowired
   private IResourceHistoryTableDao myResourceHistoryTableDao;
   @Autowired
   private IResourceIndexedSearchParamUriDao myResourceIndexedSearchParamUriDao;

@@ -250,12 +248,6 @@ class ResourceExpungeService implements IResourceExpungeService {
      }
   }

   @Override
   @Transactional
   public void deleteByResourceIdPartitions(List<Long> theResourceIds) {
      mySearchResultDao.deleteByResourceIds(theResourceIds);
   }

   private Slice<Long> toSlice(ResourceHistoryTable myVersion) {
      Validate.notNull(myVersion);
      return new SliceImpl<>(Collections.singletonList(myVersion.getId()));
@@ -1,10 +1,10 @@
package ca.uhn.fhir.jpa.entity;

import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;
import org.apache.commons.lang3.SerializationUtils;
import org.hibernate.annotations.OptimisticLock;

@@ -80,8 +80,6 @@ public class Search implements ICachedSearchDetails, Serializable {
   private Long myResourceId;
   @Column(name = "RESOURCE_TYPE", length = 200, nullable = true)
   private String myResourceType;
   @OneToMany(mappedBy = "mySearch", fetch = FetchType.LAZY)
   private Collection<SearchResult> myResults;
   @NotNull
   @Temporal(TemporalType.TIMESTAMP)
   @Column(name = "SEARCH_LAST_RETURNED", nullable = false, updatable = false)
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.entity;
 * #L%
 */

import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;

import javax.persistence.*;

@@ -39,17 +39,11 @@ public class SearchResult implements Serializable {
   @Id
   @Column(name = "PID")
   private Long myId;
   @Column(name = "SEARCH_ORDER", nullable = false)
   @Column(name = "SEARCH_ORDER", nullable = false, insertable = true, updatable = false)
   private int myOrder;
   @ManyToOne
   @JoinColumn(name = "RESOURCE_PID", referencedColumnName = "RES_ID", foreignKey = @ForeignKey(name = "FK_SEARCHRES_RES"), insertable = false, updatable = false, nullable = false)
   private ResourceTable myResource;
   @Column(name = "RESOURCE_PID", insertable = true, updatable = false, nullable = false)
   private Long myResourcePid;
   @ManyToOne
   @JoinColumn(name = "SEARCH_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_SEARCHRES_SEARCH"))
   private Search mySearch;
   @Column(name = "SEARCH_PID", insertable = false, updatable = false, nullable = false)
   @Column(name = "SEARCH_PID", insertable = true, updatable = false, nullable = false)
   private Long mySearchPid;

   /**

@@ -63,7 +57,8 @@ public class SearchResult implements Serializable {
    * Constructor
    */
   public SearchResult(Search theSearch) {
      mySearch = theSearch;
      Validate.notNull(theSearch.getId());
      mySearchPid = theSearch.getId();
   }

   @Override
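The SearchResult changes above drop the @ManyToOne associations in favour of plain RESOURCE_PID / SEARCH_PID columns, and the constructor now captures the parent search's primary key directly. A short illustrative sketch of how a result row is built under the new mapping; it mirrors the result-persistence loop shown later in this commit and is not additional code from the source:

   // Illustrative only - shows the new column-based mapping in use.
   SearchResult nextResult = new SearchResult(mySearch); // stores mySearch.getId() into SEARCH_PID
   nextResult.setResourcePid(nextPid);                   // plain column, no ResourceTable reference needed
   nextResult.setOrder(myCountSavedTotal);
   mySearchResultDao.save(nextResult);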
@@ -31,12 +31,10 @@ import java.util.Collection;

import static org.apache.commons.lang3.StringUtils.length;

//@formatter:off
@Table(name = "TRM_CODESYSTEM_VER"
   // Note, we used to have a constraint named IDX_CSV_RESOURCEPID_AND_VER (don't reuse this)
)
@Entity()
//@formatter:on
public class TermCodeSystemVersion implements Serializable {
   private static final long serialVersionUID = 1L;
@@ -26,11 +26,11 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.dao.IDao;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.server.*;

@@ -46,7 +46,6 @@ import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.Nonnull;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;

@@ -64,7 +63,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
   private EntityManager myEntityManager;
   private PlatformTransactionManager myPlatformTransactionManager;
   private ISearchCoordinatorSvc mySearchCoordinatorSvc;
   private ISearchDao mySearchDao;
   private ISearchCacheSvc mySearchCacheSvc;
   private Search mySearchEntity;
   private String myUuid;
   private boolean myCacheHit;

@@ -192,27 +191,16 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
      if (mySearchEntity == null) {
         ensureDependenciesInjected();

         TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
         txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
         txTemplate.setIsolationLevel(TransactionDefinition.ISOLATION_READ_COMMITTED);
         return txTemplate.execute(s -> {
            try {
               setSearchEntity(mySearchDao.findByUuid(myUuid));

               if (mySearchEntity == null) {
               Optional<Search> search = mySearchCacheSvc.fetchByUuid(myUuid);
               if (!search.isPresent()) {
                  return false;
               }

               setSearchEntity(search.get());

               ourLog.trace("Retrieved search with version {} and total {}", mySearchEntity.getVersion(), mySearchEntity.getTotalCount());

               // Load the includes now so that they are available outside of this transaction
               mySearchEntity.getIncludes().size();

               return true;
            } catch (NoResultException e) {
               return false;
            }
         });
      }
      return true;
   }

@@ -292,10 +280,6 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
      mySearchCoordinatorSvc = theSearchCoordinatorSvc;
   }

   public void setSearchDao(ISearchDao theSearchDao) {
      mySearchDao = theSearchDao;
   }

   // Note: Leave as protected, HSPC depends on this
   @SuppressWarnings("WeakerAccess")
   protected void setSearchEntity(Search theSearchEntity) {

@@ -353,4 +337,8 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
   public void setInterceptorBroadcaster(IInterceptorBroadcaster theInterceptorBroadcaster) {
      myInterceptorBroadcaster = theInterceptorBroadcaster;
   }

   public void setSearchCacheSvc(ISearchCacheSvc theSearchCacheSvc) {
      mySearchCacheSvc = theSearchCacheSvc;
   }
}
@@ -25,16 +25,14 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.dao.*;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.model.api.Include;

@@ -56,15 +54,12 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.AbstractPageRequest;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.orm.jpa.JpaDialect;

@@ -83,7 +78,10 @@ import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManager;
import javax.validation.constraints.NotNull;
import java.io.IOException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.concurrent.*;

@@ -106,16 +104,14 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
   private long myMaxMillisToWaitForRemoteResults = DateUtils.MILLIS_PER_MINUTE;
   private boolean myNeverUseLocalSearchForUnitTests;
   @Autowired
   private ISearchDao mySearchDao;
   @Autowired
   private ISearchIncludeDao mySearchIncludeDao;
   @Autowired
   private ISearchResultDao mySearchResultDao;
   @Autowired
   private IInterceptorBroadcaster myInterceptorBroadcaster;
   @Autowired
   private PlatformTransactionManager myManagedTxManager;
   @Autowired
   private ISearchCacheSvc mySearchCacheSvc;
   @Autowired
   private ISearchResultCacheSvc mySearchResultCacheSvc;
   @Autowired
   private DaoRegistry myDaoRegistry;
   @Autowired
   private IPagingProvider myPagingProvider;

@@ -134,6 +130,12 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
      myExecutor = Executors.newCachedThreadPool(threadFactory);
   }

   @VisibleForTesting
   public void setSearchCacheServicesForUnitTest(ISearchCacheSvc theSearchCacheSvc, ISearchResultCacheSvc theSearchResultCacheSvc) {
      mySearchCacheSvc = theSearchCacheSvc;
      mySearchResultCacheSvc = theSearchResultCacheSvc;
   }

   @PostConstruct
   public void start() {
      if (myManagedTxManager instanceof JpaTransactionManager) {

@@ -184,13 +186,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
         }
      }

      search = txTemplate.execute(t -> mySearchDao.findByUuid(theUuid));

      if (search == null) {
      search = mySearchCacheSvc
         .fetchByUuid(theUuid)
         .orElseThrow(() -> {
            ourLog.debug("Client requested unknown paging ID[{}]", theUuid);
            String msg = myContext.getLocalizer().getMessage(PageMethodBinding.class, "unknownSearchId", theUuid);
            throw new ResourceGoneException(msg);
         }
            return new ResourceGoneException(msg);
         });

      verifySearchHasntFailedOrThrowInternalErrorException(search);
      if (search.getStatus() == SearchStatusEnum.FINISHED) {

@@ -210,7 +212,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
      // If the search was saved in "pass complete mode" it's probably time to
      // start a new pass
      if (search.getStatus() == SearchStatusEnum.PASSCMPLET) {
         Optional<Search> newSearch = tryToMarkSearchAsInProgress(search);
         Optional<Search> newSearch = mySearchCacheSvc.tryToMarkSearchAsInProgress(search);
         if (newSearch.isPresent()) {
            search = newSearch.get();
            String resourceType = search.getResourceType();
@@ -229,60 +231,22 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
         }
      }

      final Pageable page = toPage(theFrom, theTo);
      if (page == null) {
         return Collections.emptyList();
      return mySearchResultCacheSvc.fetchResultPids(search, theFrom, theTo);
      }

      final Search foundSearch = search;

      ourLog.trace("Loading stored search");
      List<Long> retVal = txTemplate.execute(theStatus -> {
         final List<Long> resultPids = new ArrayList<>();
         Page<Long> searchResultPids = mySearchResultDao.findWithSearchUuid(foundSearch, page);
         for (Long next : searchResultPids) {
            resultPids.add(next);
         }
         return resultPids;
      });
      return retVal;
   }

   private Optional<Search> tryToMarkSearchAsInProgress(Search theSearch) {
      ourLog.trace("Going to try to change search status from {} to {}", theSearch.getStatus(), SearchStatusEnum.LOADING);
      try {
         TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);
         txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
         txTemplate.afterPropertiesSet();
         return txTemplate.execute(t -> {
            Search search = mySearchDao.findById(theSearch.getId()).orElse(theSearch);

            if (search.getStatus() != SearchStatusEnum.PASSCMPLET) {
               throw new IllegalStateException("Can't change to LOADING because state is " + theSearch.getStatus());
            }
            search.setStatus(SearchStatusEnum.LOADING);
            Search newSearch = mySearchDao.save(search);
            return Optional.of(newSearch);
         });
      } catch (Exception e) {
         ourLog.warn("Failed to activate search: {}", e.toString());
         ourLog.trace("Failed to activate search", e);
         return Optional.empty();
      }
   }

   private void populateBundleProvider(PersistedJpaBundleProvider theRetVal) {
      theRetVal.setContext(myContext);
      theRetVal.setEntityManager(myEntityManager);
      theRetVal.setPlatformTransactionManager(myManagedTxManager);
      theRetVal.setSearchDao(mySearchDao);
      theRetVal.setSearchCacheSvc(mySearchCacheSvc);
      theRetVal.setSearchCoordinatorSvc(this);
      theRetVal.setInterceptorBroadcaster(myInterceptorBroadcaster);
   }

   @Override
   public IBundleProvider registerSearch(final IDao theCallingDao, final SearchParameterMap theParams, String theResourceType, CacheControlDirective theCacheControlDirective, RequestDetails theRequestDetails) {
      StopWatch w = new StopWatch();
      final String searchUuid = UUID.randomUUID().toString();

      ourLog.debug("Registering new search {}", searchUuid);

@@ -292,24 +256,125 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
      sb.setType(resourceTypeClass, theResourceType);
      sb.setFetchSize(mySyncSize);

      final Integer loadSynchronousUpTo;
      if (theCacheControlDirective != null && theCacheControlDirective.isNoStore()) {
         if (theCacheControlDirective.getMaxResults() != null) {
            loadSynchronousUpTo = theCacheControlDirective.getMaxResults();
            if (loadSynchronousUpTo > myDaoConfig.getCacheControlNoStoreMaxResultsUpperLimit()) {
               throw new InvalidRequestException(Constants.HEADER_CACHE_CONTROL + " header " + Constants.CACHE_CONTROL_MAX_RESULTS + " value must not exceed " + myDaoConfig.getCacheControlNoStoreMaxResultsUpperLimit());
            }
         } else {
            loadSynchronousUpTo = 100;
         }
      } else {
         loadSynchronousUpTo = null;
      }
      final Integer loadSynchronousUpTo = getLoadSynchronousUpToOrNull(theCacheControlDirective);

      if (theParams.isLoadSynchronous() || loadSynchronousUpTo != null) {

         ourLog.debug("Search {} is loading in synchronous mode", searchUuid);
         SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequestDetails, searchUuid);
         return executeQuery(theParams, theRequestDetails, searchUuid, sb, loadSynchronousUpTo);
      }

      /*
       * See if there are any cached searches whose results we can return
       * instead
       */
      boolean useCache = true;
      if (theCacheControlDirective != null && theCacheControlDirective.isNoCache() == true) {
         useCache = false;
      }

      final String queryString = theParams.toNormalizedQueryString(myContext);
      if (theParams.getEverythingMode() == null) {
         if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null && useCache) {
            IBundleProvider foundSearchProvider = findCachedQuery(theCallingDao, theParams, theResourceType, theRequestDetails, queryString);
            if (foundSearchProvider != null) {
               return foundSearchProvider;
            }
         }
      }

      return submitSearch(theCallingDao, theParams, theResourceType, theRequestDetails, searchUuid, sb, queryString);

   }

   @NotNull
   private IBundleProvider submitSearch(IDao theCallingDao, SearchParameterMap theParams, String theResourceType, RequestDetails theRequestDetails, String theSearchUuid, ISearchBuilder theSb, String theQueryString) {
      StopWatch w = new StopWatch();
      Search search = new Search();
      populateSearchEntity(theParams, theResourceType, theSearchUuid, theQueryString, search);

      // Interceptor call: STORAGE_PRESEARCH_REGISTERED
      HookParams params = new HookParams()
         .add(ICachedSearchDetails.class, search)
         .add(RequestDetails.class, theRequestDetails)
         .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
      JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params);

      SearchTask task = new SearchTask(search, theCallingDao, theParams, theResourceType, theRequestDetails);
      myIdToSearchTask.put(search.getUuid(), task);
      myExecutor.submit(task);

      PersistedJpaSearchFirstPageBundleProvider retVal = new PersistedJpaSearchFirstPageBundleProvider(search, theCallingDao, task, theSb, myManagedTxManager, theRequestDetails);
      populateBundleProvider(retVal);

      ourLog.debug("Search initial phase completed in {}ms", w.getMillis());
      return retVal;
   }

   @org.jetbrains.annotations.Nullable
   private IBundleProvider findCachedQuery(IDao theCallingDao, SearchParameterMap theParams, String theResourceType, RequestDetails theRequestDetails, String theQueryString) {
      TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);
      PersistedJpaBundleProvider foundSearchProvider = txTemplate.execute(t -> {

         // Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH
         HookParams params = new HookParams()
            .add(SearchParameterMap.class, theParams)
            .add(RequestDetails.class, theRequestDetails)
            .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
         Object outcome = JpaInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params);
         if (Boolean.FALSE.equals(outcome)) {
            return null;
         }

         // Check for a search matching the given hash
         Search searchToUse = findSearchToUseOrNull(theQueryString, theResourceType);
         if (searchToUse == null) {
            return null;
         }

         ourLog.debug("Reusing search {} from cache", searchToUse.getUuid());
         // Interceptor call: JPA_PERFTRACE_SEARCH_REUSING_CACHED
         params = new HookParams()
            .add(SearchParameterMap.class, theParams)
            .add(RequestDetails.class, theRequestDetails)
            .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
         JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params);

         mySearchCacheSvc.updateSearchLastReturned(searchToUse, new Date());

         PersistedJpaBundleProvider retVal = new PersistedJpaBundleProvider(theRequestDetails, searchToUse.getUuid(), theCallingDao);
         retVal.setCacheHit(true);
         populateBundleProvider(retVal);

         return retVal;
      });

      if (foundSearchProvider != null) {
         return foundSearchProvider;
      }
      return null;
   }

   @Nullable
   private Search findSearchToUseOrNull(String theQueryString, String theResourceType) {
      Search searchToUse = null;

      // createdCutoff is in recent past
      final Instant createdCutoff = Instant.now().minus(myDaoConfig.getReuseCachedSearchResultsForMillis(), ChronoUnit.MILLIS);
      Collection<Search> candidates = mySearchCacheSvc.findCandidatesForReuse(theResourceType, theQueryString, theQueryString.hashCode(), Date.from(createdCutoff));

      for (Search nextCandidateSearch : candidates) {
         // We should only reuse our search if it was created within the permitted window
         // Date.after() is unreliable. Instant.isAfter() always works.
         if (theQueryString.equals(nextCandidateSearch.getSearchQueryString()) && nextCandidateSearch.getCreated().toInstant().isAfter(createdCutoff)) {
            searchToUse = nextCandidateSearch;
            break;
         }
      }
      return searchToUse;
   }

   private IBundleProvider executeQuery(SearchParameterMap theParams, RequestDetails theRequestDetails, String theSearchUuid, ISearchBuilder theSb, Integer theLoadSynchronousUpTo) {
      SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequestDetails, theSearchUuid);
      searchRuntimeDetails.setLoadSynchronous(true);

      // Execute the query and make sure we return distinct results

@@ -320,10 +385,10 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
         // Load the results synchronously
         final List<Long> pids = new ArrayList<>();

         try (IResultIterator resultIter = sb.createQuery(theParams, searchRuntimeDetails, theRequestDetails)) {
         try (IResultIterator resultIter = theSb.createQuery(theParams, searchRuntimeDetails, theRequestDetails)) {
            while (resultIter.hasNext()) {
               pids.add(resultIter.next());
               if (loadSynchronousUpTo != null && pids.size() >= loadSynchronousUpTo) {
               if (theLoadSynchronousUpTo != null && pids.size() >= theLoadSynchronousUpTo) {
                  break;
               }
               if (theParams.getLoadSynchronousUpTo() != null && pids.size() >= theParams.getLoadSynchronousUpTo()) {

@@ -335,7 +400,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
            throw new InternalErrorException(e);
         }

         JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(pids, () -> sb);
         JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(pids, () -> theSb);
         HookParams params = new HookParams()
            .add(IPreResourceAccessDetails.class, accessDetails)
            .add(RequestDetails.class, theRequestDetails)

@@ -358,105 +423,32 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
          * individually for pages as we return them to clients
          */
         final Set<Long> includedPids = new HashSet<>();
         includedPids.addAll(sb.loadIncludes(myContext, myEntityManager, pids, theParams.getRevIncludes(), true, theParams.getLastUpdated(), "(synchronous)", theRequestDetails));
         includedPids.addAll(sb.loadIncludes(myContext, myEntityManager, pids, theParams.getIncludes(), false, theParams.getLastUpdated(), "(synchronous)", theRequestDetails));
         includedPids.addAll(theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getRevIncludes(), true, theParams.getLastUpdated(), "(synchronous)", theRequestDetails));
         includedPids.addAll(theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getIncludes(), false, theParams.getLastUpdated(), "(synchronous)", theRequestDetails));
         List<Long> includedPidsList = new ArrayList<>(includedPids);

         List<IBaseResource> resources = new ArrayList<>();
         sb.loadResourcesByPid(pids, includedPidsList, resources, false, theRequestDetails);
         theSb.loadResourcesByPid(pids, includedPidsList, resources, false, theRequestDetails);
         return new SimpleBundleProvider(resources);
      });
   }

      /*
       * See if there are any cached searches whose results we can return
       * instead
       */
      boolean useCache = true;
      if (theCacheControlDirective != null && theCacheControlDirective.isNoCache() == true) {
         useCache = false;
   @org.jetbrains.annotations.Nullable
   private Integer getLoadSynchronousUpToOrNull(CacheControlDirective theCacheControlDirective) {
      final Integer loadSynchronousUpTo;
      if (theCacheControlDirective != null && theCacheControlDirective.isNoStore()) {
         if (theCacheControlDirective.getMaxResults() != null) {
            loadSynchronousUpTo = theCacheControlDirective.getMaxResults();
            if (loadSynchronousUpTo > myDaoConfig.getCacheControlNoStoreMaxResultsUpperLimit()) {
               throw new InvalidRequestException(Constants.HEADER_CACHE_CONTROL + " header " + Constants.CACHE_CONTROL_MAX_RESULTS + " value must not exceed " + myDaoConfig.getCacheControlNoStoreMaxResultsUpperLimit());
            }
      final String queryString = theParams.toNormalizedQueryString(myContext);
      if (theParams.getEverythingMode() == null) {
         if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null && useCache) {

            final Date createdCutoff = new Date(System.currentTimeMillis() - myDaoConfig.getReuseCachedSearchResultsForMillis());
            final String resourceType = theResourceType;

            TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);
            PersistedJpaBundleProvider foundSearchProvider = txTemplate.execute(t -> {
               Search searchToUse = null;

               // Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH
               HookParams params = new HookParams()
                  .add(SearchParameterMap.class, theParams)
                  .add(RequestDetails.class, theRequestDetails)
                  .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
               Object outcome = JpaInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params);
               if (Boolean.FALSE.equals(outcome)) {
                  return null;
         } else {
            loadSynchronousUpTo = 100;
         }

               // Check for a search matching the given hash
               int hashCode = queryString.hashCode();
               Collection<Search> candidates = mySearchDao.find(resourceType, hashCode, createdCutoff);
               for (Search nextCandidateSearch : candidates) {
                  if (queryString.equals(nextCandidateSearch.getSearchQueryString())) {
                     searchToUse = nextCandidateSearch;
                     break;
      } else {
         loadSynchronousUpTo = null;
                  }
               }

               PersistedJpaBundleProvider retVal = null;
               if (searchToUse != null) {
                  ourLog.debug("Reusing search {} from cache", searchToUse.getUuid());

                  // Interceptor call: JPA_PERFTRACE_SEARCH_REUSING_CACHED
                  params = new HookParams()
                     .add(SearchParameterMap.class, theParams)
                     .add(RequestDetails.class, theRequestDetails)
                     .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
                  JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params);

                  searchToUse.setSearchLastReturned(new Date());
                  mySearchDao.updateSearchLastReturned(searchToUse.getId(), new Date());

                  retVal = new PersistedJpaBundleProvider(theRequestDetails, searchToUse.getUuid(), theCallingDao);
                  retVal.setCacheHit(true);

                  populateBundleProvider(retVal);
               }

               return retVal;
            });

            if (foundSearchProvider != null) {
               return foundSearchProvider;
            }

         }
      }

      Search search = new Search();
      populateSearchEntity(theParams, theResourceType, searchUuid, queryString, search);

      // Interceptor call: STORAGE_PRESEARCH_REGISTERED
      HookParams params = new HookParams()
         .add(ICachedSearchDetails.class, search)
         .add(RequestDetails.class, theRequestDetails)
         .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
      JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params);

      SearchTask task = new SearchTask(search, theCallingDao, theParams, theResourceType, theRequestDetails);
      myIdToSearchTask.put(search.getUuid(), task);
      myExecutor.submit(task);

      PersistedJpaSearchFirstPageBundleProvider retVal = new PersistedJpaSearchFirstPageBundleProvider(search, theCallingDao, task, sb, myManagedTxManager, theRequestDetails);
      populateBundleProvider(retVal);

      ourLog.debug("Search initial phase completed in {}ms", w.getMillis());
      return retVal;

      return loadSynchronousUpTo;
   }

   private void callInterceptorStoragePreAccessResources(IInterceptorBroadcaster theInterceptorBroadcaster, RequestDetails theRequestDetails, ISearchBuilder theSb, List<Long> thePids) {

@@ -500,21 +492,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
      myNeverUseLocalSearchForUnitTests = theNeverUseLocalSearchForUnitTests;
   }

   @VisibleForTesting
   void setSearchDaoForUnitTest(ISearchDao theSearchDao) {
      mySearchDao = theSearchDao;
   }

   @VisibleForTesting
   void setSearchDaoIncludeForUnitTest(ISearchIncludeDao theSearchIncludeDao) {
      mySearchIncludeDao = theSearchIncludeDao;
   }

   @VisibleForTesting
   void setSearchDaoResultForUnitTest(ISearchResultDao theSearchResultDao) {
      mySearchResultDao = theSearchResultDao;
   }

   @VisibleForTesting
   public void setSyncSizeForUnitTests(int theSyncSize) {
      mySyncSize = theSyncSize;

@@ -696,20 +673,10 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
         }
      }

      List<SearchResult> resultsToSave = Lists.newArrayList();
      for (Long nextPid : unsyncedPids) {
         SearchResult nextResult = new SearchResult(mySearch);
         nextResult.setResourcePid(nextPid);
         nextResult.setOrder(myCountSavedTotal);
         resultsToSave.add(nextResult);
         int order = nextResult.getOrder();
         ourLog.trace("Saving ORDER[{}] Resource {}", order, nextResult.getResourcePid());

         myCountSavedTotal++;
         myCountSavedThisPass++;
      }

      mySearchResultDao.saveAll(resultsToSave);
      // Actually store the results in the query cache storage
      myCountSavedTotal += unsyncedPids.size();
      myCountSavedThisPass += unsyncedPids.size();
      mySearchResultCacheSvc.storeResults(mySearch, mySyncedPids, unsyncedPids);

      synchronized (mySyncedPids) {
         int numSyncedThisPass = unsyncedPids.size();

@@ -877,15 +844,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {

      private void doSaveSearch() {

         Search newSearch;
         if (mySearch.getId() == null) {
            newSearch = mySearchDao.save(mySearch);
            for (SearchInclude next : mySearch.getIncludes()) {
               mySearchIncludeDao.save(next);
            }
         } else {
            newSearch = mySearchDao.save(mySearch);
         }
         Search newSearch = mySearchCacheSvc.save(mySearch);

         // mySearchDao.save is not supposed to return null, but in unit tests
         // it can if the mock search dao isn't set up to handle that

@@ -928,7 +887,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
               mySearch.setStatus(SearchStatusEnum.FINISHED);
            }
            doSaveSearch();
            mySearchDao.flush();
         }
      });
      if (wantOnlyCount) {

@@ -1058,7 +1016,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
         TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);
         txTemplate.afterPropertiesSet();
         txTemplate.execute(t -> {
            List<Long> previouslyAddedResourcePids = mySearchResultDao.findWithSearchUuidOrderIndependent(getSearch());
            List<Long> previouslyAddedResourcePids = mySearchResultCacheSvc.fetchAllResultPids(getSearch());
            ourLog.debug("Have {} previously added IDs in search: {}", previouslyAddedResourcePids.size(), getSearch().getUuid());
            setPreviouslyAddedResourcePids(previouslyAddedResourcePids);
            return null;

@@ -21,24 +21,13 @@ package ca.uhn.fhir.jpa.search;
 */

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu3.model.InstantType;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.Date;
import java.util.List;
import static ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl.DEFAULT_CUTOFF_SLACK;

/**
 * Deletes old searches

@@ -49,111 +38,16 @@ import java.util.List;
// in Smile.
//
public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
   public static final long DEFAULT_CUTOFF_SLACK = 10 * DateUtils.MILLIS_PER_SECOND;
   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StaleSearchDeletingSvcImpl.class);
   /*
    * Be careful increasing this number! We use the number of params here in a
    * DELETE FROM foo WHERE params IN (aaaa)
    * type query and this can fail if we have 1000s of params
    */
   public static final int DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT = 500;
   public static final int DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS = 20000;
   private static int ourMaximumResultsToDeleteInOneStatement = DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT;
   private static int ourMaximumResultsToDeleteInOnePass = DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS;
   private static Long ourNowForUnitTests;
   /*
    * We give a bit of extra leeway just to avoid race conditions where a query result
    * is being reused (because a new client request came in with the same params) right before
    * the result is to be deleted
    */
   private long myCutoffSlack = DEFAULT_CUTOFF_SLACK;
   @Autowired
   private DaoConfig myDaoConfig;
   @Autowired
   private ISearchDao mySearchDao;
   @Autowired
   private ISearchIncludeDao mySearchIncludeDao;
   @Autowired
   private ISearchResultDao mySearchResultDao;
   @Autowired
   private PlatformTransactionManager myTransactionManager;

   private void deleteSearch(final Long theSearchPid) {
      mySearchDao.findById(theSearchPid).ifPresent(searchToDelete -> {
         mySearchIncludeDao.deleteForSearch(searchToDelete.getId());

         /*
          * Note, we're only deleting up to 500 results in an individual search here. This
          * is to prevent really long running transactions in cases where there are
          * huge searches with tons of results in them. By the time we've gotten here
          * we have marked the parent Search entity as deleted, so it's not such a
          * huge deal to be only partially deleting search results. They'll get deleted
          * eventually
          */
         int max = ourMaximumResultsToDeleteInOnePass;
         Slice<Long> resultPids = mySearchResultDao.findForSearch(PageRequest.of(0, max), searchToDelete.getId());
         if (resultPids.hasContent()) {
            List<List<Long>> partitions = Lists.partition(resultPids.getContent(), ourMaximumResultsToDeleteInOneStatement);
            for (List<Long> nextPartition : partitions) {
               mySearchResultDao.deleteByIds(nextPartition);
            }

         }

         // Only delete if we don't have results left in this search
         if (resultPids.getNumberOfElements() < max) {
            ourLog.debug("Deleting search {}/{} - Created[{}] -- Last returned[{}]", searchToDelete.getId(), searchToDelete.getUuid(), new InstantType(searchToDelete.getCreated()), new InstantType(searchToDelete.getSearchLastReturned()));
            mySearchDao.deleteByPid(searchToDelete.getId());
         } else {
            ourLog.debug("Purged {} search results for deleted search {}/{}", resultPids.getSize(), searchToDelete.getId(), searchToDelete.getUuid());
         }
      });
   }
   private ISearchCacheSvc mySearchCacheSvc;

   @Override
   @Transactional(propagation = Propagation.NEVER)
   public void pollForStaleSearchesAndDeleteThem() {
      if (!myDaoConfig.isExpireSearchResults()) {
         return;
      }

      long cutoffMillis = myDaoConfig.getExpireSearchResultsAfterMillis();
      if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null) {
         cutoffMillis = Math.max(cutoffMillis, myDaoConfig.getReuseCachedSearchResultsForMillis());
      }
      final Date cutoff = new Date((now() - cutoffMillis) - myCutoffSlack);

      if (ourNowForUnitTests != null) {
         ourLog.info("Searching for searches which are before {} - now is {}", new InstantType(cutoff), new InstantType(new Date(now())));
      }

      ourLog.debug("Searching for searches which are before {}", cutoff);

      TransactionTemplate tt = new TransactionTemplate(myTransactionManager);
      final Slice<Long> toDelete = tt.execute(theStatus ->
         mySearchDao.findWhereLastReturnedBefore(cutoff, PageRequest.of(0, 2000))
      );
      for (final Long nextSearchToDelete : toDelete) {
         ourLog.debug("Deleting search with PID {}", nextSearchToDelete);
         tt.execute(t -> {
            mySearchDao.updateDeleted(nextSearchToDelete, true);
            return null;
         });

         tt.execute(t -> {
            deleteSearch(nextSearchToDelete);
            return null;
         });
      }

      int count = toDelete.getContent().size();
      if (count > 0) {
         if (ourLog.isDebugEnabled()) {
            long total = tt.execute(t -> mySearchDao.count());
            ourLog.debug("Deleted {} searches, {} remaining", count, total);
         }
      }

      mySearchCacheSvc.pollForStaleSearchesAndDeleteThem();
   }

   @Scheduled(fixedDelay = DEFAULT_CUTOFF_SLACK)

@@ -164,35 +58,4 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc {
         pollForStaleSearchesAndDeleteThem();
      }
   }

   @VisibleForTesting
   public void setCutoffSlackForUnitTest(long theCutoffSlack) {
      myCutoffSlack = theCutoffSlack;
   }

   @VisibleForTesting
   public static void setMaximumResultsToDeleteInOnePassForUnitTest(int theMaximumResultsToDeleteInOnePass) {
      ourMaximumResultsToDeleteInOnePass = theMaximumResultsToDeleteInOnePass;
   }

   @VisibleForTesting
   public static void setMaximumResultsToDeleteForUnitTest(int theMaximumResultsToDelete) {
      ourMaximumResultsToDeleteInOneStatement = theMaximumResultsToDelete;
   }

   private static long now() {
      if (ourNowForUnitTests != null) {
         return ourNowForUnitTests;
      }
      return System.currentTimeMillis();
   }

   /**
    * This is for unit tests only, do not call otherwise
    */
   @VisibleForTesting
   public static void setNowForUnitTests(Long theNowForUnitTests) {
      ourNowForUnitTests = theNowForUnitTests;
   }

}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/BaseSearchCacheSvcImpl.java (new file, 64 lines)

@@ -0,0 +1,64 @@
package ca.uhn.fhir.jpa.search.cache;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2019 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.entity.Search;
import org.apache.commons.lang3.time.DateUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public abstract class BaseSearchCacheSvcImpl implements ISearchCacheSvc {

   @Autowired
   private PlatformTransactionManager myTxManager;

   private ConcurrentHashMap<Long, Date> myUnsyncedLastUpdated = new ConcurrentHashMap<>();

   @Override
   public void updateSearchLastReturned(Search theSearch, Date theDate) {
      myUnsyncedLastUpdated.put(theSearch.getId(), theDate);
   }

   @Override
   @Scheduled(fixedDelay = 10 * DateUtils.MILLIS_PER_SECOND)
   public void flushLastUpdated() {
      TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
      txTemplate.execute(t -> {
         for (Iterator<Map.Entry<Long, Date>> iter = myUnsyncedLastUpdated.entrySet().iterator(); iter.hasNext(); ) {
            Map.Entry<Long, Date> next = iter.next();
            flushLastUpdated(next.getKey(), next.getValue());
            iter.remove();
         }
         return null;
      });
   }

   protected abstract void flushLastUpdated(Long theSearchId, Date theLastUpdated);

}
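BaseSearchCacheSvcImpl above buffers the "search last returned" timestamps in an in-memory map and flushes them on a fixed schedule, which avoids issuing an UPDATE on every page fetch. A concrete subclass only needs to persist one buffered entry at a time. The DatabaseSearchCacheSvcImpl excerpt below is cut off before this method appears, so the following is a hypothetical sketch rather than the verbatim source; the ISearchDao.updateSearchLastReturned() call it relies on does appear elsewhere in this commit:

   // Hypothetical sketch of the database-backed flush - an assumption, not the actual implementation.
   @Override
   protected void flushLastUpdated(Long theSearchId, Date theLastUpdated) {
      mySearchDao.updateSearchLastReturned(theSearchId, theLastUpdated);
   }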
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java (new file, 261 lines)

@@ -0,0 +1,261 @@
package ca.uhn.fhir.jpa.search.cache;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2019 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu3.model.InstantType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;

import javax.transaction.Transactional;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Optional;

public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl {
   private static final Logger ourLog = LoggerFactory.getLogger(DatabaseSearchCacheSvcImpl.class);

   /*
    * Be careful increasing this number! We use the number of params here in a
    * DELETE FROM foo WHERE params IN (aaaa)
    * type query and this can fail if we have 1000s of params
    */
   public static final int DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT = 500;
   public static final int DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS = 20000;
   public static final long DEFAULT_CUTOFF_SLACK = 10 * DateUtils.MILLIS_PER_SECOND;

   private static int ourMaximumResultsToDeleteInOneStatement = DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT;
   private static int ourMaximumResultsToDeleteInOnePass = DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS;
   private static Long ourNowForUnitTests;
   /*
    * We give a bit of extra leeway just to avoid race conditions where a query result
    * is being reused (because a new client request came in with the same params) right before
    * the result is to be deleted
    */
   private long myCutoffSlack = DEFAULT_CUTOFF_SLACK;

   @Autowired
   private ISearchDao mySearchDao;
   @Autowired
   private ISearchResultDao mySearchResultDao;
   @Autowired
   private ISearchIncludeDao mySearchIncludeDao;
   @Autowired
   private PlatformTransactionManager myTxManager;
   @Autowired
   private DaoConfig myDaoConfig;

   @VisibleForTesting
   public void setCutoffSlackForUnitTest(long theCutoffSlack) {
      myCutoffSlack = theCutoffSlack;
   }

   @Transactional(Transactional.TxType.REQUIRED)
   @Override
   public Search save(Search theSearch) {
      Search newSearch;
      if (theSearch.getId() == null) {
         newSearch = mySearchDao.save(theSearch);
         for (SearchInclude next : theSearch.getIncludes()) {
            mySearchIncludeDao.save(next);
         }
      } else {
         newSearch = mySearchDao.save(theSearch);
|
||||
}
|
||||
return newSearch;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(Transactional.TxType.REQUIRED)
|
||||
public Optional<Search> fetchByUuid(String theUuid) {
|
||||
Validate.notBlank(theUuid);
|
||||
return mySearchDao.findByUuidAndFetchIncludes(theUuid);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
@Transactional(Transactional.TxType.NEVER)
|
||||
public Optional<Search> tryToMarkSearchAsInProgress(Search theSearch) {
|
||||
ourLog.trace("Going to try to change search status from {} to {}", theSearch.getStatus(), SearchStatusEnum.LOADING);
|
||||
try {
|
||||
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
|
||||
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
|
||||
txTemplate.afterPropertiesSet();
|
||||
return txTemplate.execute(t -> {
|
||||
Search search = mySearchDao.findById(theSearch.getId()).orElse(theSearch);
|
||||
|
||||
if (search.getStatus() != SearchStatusEnum.PASSCMPLET) {
|
||||
throw new IllegalStateException("Can't change to LOADING because state is " + theSearch.getStatus());
|
||||
}
|
||||
search.setStatus(SearchStatusEnum.LOADING);
|
||||
Search newSearch = mySearchDao.save(search);
|
||||
return Optional.of(newSearch);
|
||||
});
|
||||
} catch (Exception e) {
|
||||
ourLog.warn("Failed to activate search: {}", e.toString());
|
||||
ourLog.trace("Failed to activate search", e);
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Search> findCandidatesForReuse(String theResourceType, String theQueryString, int theQueryStringHash, Date theCreatedAfter) {
|
||||
int hashCode = theQueryString.hashCode();
|
||||
return mySearchDao.find(theResourceType, hashCode, theCreatedAfter);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void flushLastUpdated(Long theSearchId, Date theLastUpdated) {
|
||||
mySearchDao.updateSearchLastReturned(theSearchId, theLastUpdated);
|
||||
}
|
||||
|
||||
@Transactional(Transactional.TxType.NEVER)
|
||||
@Override
|
||||
public void pollForStaleSearchesAndDeleteThem() {
|
||||
if (!myDaoConfig.isExpireSearchResults()) {
|
||||
return;
|
||||
}
|
||||
|
||||
long cutoffMillis = myDaoConfig.getExpireSearchResultsAfterMillis();
|
||||
if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null) {
|
||||
cutoffMillis = Math.max(cutoffMillis, myDaoConfig.getReuseCachedSearchResultsForMillis());
|
||||
}
|
||||
final Date cutoff = new Date((now() - cutoffMillis) - myCutoffSlack);
|
||||
|
||||
if (ourNowForUnitTests != null) {
|
||||
ourLog.info("Searching for searches which are before {} - now is {}", new InstantType(cutoff), new InstantType(new Date(now())));
|
||||
}
|
||||
|
||||
ourLog.debug("Searching for searches which are before {}", cutoff);
|
||||
|
||||
TransactionTemplate tt = new TransactionTemplate(myTxManager);
|
||||
final Slice<Long> toDelete = tt.execute(theStatus ->
|
||||
mySearchDao.findWhereLastReturnedBefore(cutoff, PageRequest.of(0, 2000))
|
||||
);
|
||||
for (final Long nextSearchToDelete : toDelete) {
|
||||
ourLog.debug("Deleting search with PID {}", nextSearchToDelete);
|
||||
tt.execute(t -> {
|
||||
mySearchDao.updateDeleted(nextSearchToDelete, true);
|
||||
return null;
|
||||
});
|
||||
|
||||
tt.execute(t -> {
|
||||
deleteSearch(nextSearchToDelete);
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
int count = toDelete.getContent().size();
|
||||
if (count > 0) {
|
||||
if (ourLog.isDebugEnabled()) {
|
||||
long total = tt.execute(t -> mySearchDao.count());
|
||||
ourLog.debug("Deleted {} searches, {} remaining", count, total);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@VisibleForTesting
|
||||
void setSearchDaoForUnitTest(ISearchDao theSearchDao) {
|
||||
mySearchDao = theSearchDao;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setSearchDaoIncludeForUnitTest(ISearchIncludeDao theSearchIncludeDao) {
|
||||
mySearchIncludeDao = theSearchIncludeDao;
|
||||
}
|
||||
|
||||
private void deleteSearch(final Long theSearchPid) {
|
||||
mySearchDao.findById(theSearchPid).ifPresent(searchToDelete -> {
|
||||
mySearchIncludeDao.deleteForSearch(searchToDelete.getId());
|
||||
|
||||
/*
|
||||
* Note, we're only deleting up to ourMaximumResultsToDeleteInOnePass results for an individual search here (20000 by default). This
|
||||
* is to prevent really long running transactions in cases where there are
|
||||
* huge searches with tons of results in them. By the time we've gotten here
|
||||
* we have marked the parent Search entity as deleted, so it's not such a
|
||||
* huge deal to be only partially deleting search results. They'll get deleted
|
||||
* eventually
|
||||
*/
|
||||
int max = ourMaximumResultsToDeleteInOnePass;
|
||||
Slice<Long> resultPids = mySearchResultDao.findForSearch(PageRequest.of(0, max), searchToDelete.getId());
|
||||
if (resultPids.hasContent()) {
|
||||
List<List<Long>> partitions = Lists.partition(resultPids.getContent(), ourMaximumResultsToDeleteInOneStatement);
|
||||
for (List<Long> nextPartition : partitions) {
|
||||
mySearchResultDao.deleteByIds(nextPartition);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Only delete if we don't have results left in this search
|
||||
if (resultPids.getNumberOfElements() < max) {
|
||||
ourLog.debug("Deleting search {}/{} - Created[{}] -- Last returned[{}]", searchToDelete.getId(), searchToDelete.getUuid(), new InstantType(searchToDelete.getCreated()), new InstantType(searchToDelete.getSearchLastReturned()));
|
||||
mySearchDao.deleteByPid(searchToDelete.getId());
|
||||
} else {
|
||||
ourLog.debug("Purged {} search results for deleted search {}/{}", resultPids.getSize(), searchToDelete.getId(), searchToDelete.getUuid());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public static void setMaximumResultsToDeleteInOnePassForUnitTest(int theMaximumResultsToDeleteInOnePass) {
|
||||
ourMaximumResultsToDeleteInOnePass = theMaximumResultsToDeleteInOnePass;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public static void setMaximumResultsToDeleteForUnitTest(int theMaximumResultsToDelete) {
|
||||
ourMaximumResultsToDeleteInOneStatement = theMaximumResultsToDelete;
|
||||
}
|
||||
|
||||
/**
|
||||
* This is for unit tests only, do not call otherwise
|
||||
*/
|
||||
@VisibleForTesting
|
||||
public static void setNowForUnitTests(Long theNowForUnitTests) {
|
||||
ourNowForUnitTests = theNowForUnitTests;
|
||||
}
|
||||
|
||||
private static long now() {
|
||||
if (ourNowForUnitTests != null) {
|
||||
return ourNowForUnitTests;
|
||||
}
|
||||
return System.currentTimeMillis();
|
||||
}
|
||||
|
||||
}
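Two limits govern the purge above: each polling pass removes at most ourMaximumResultsToDeleteInOnePass result rows per search, and those rows are deleted in IN-clause chunks of at most ourMaximumResultsToDeleteInOneStatement so no single DELETE carries thousands of bind parameters; the parent Search row is only removed once a pass drains everything that remains. A self-contained sketch of that arithmetic, using the default limits from the class above and made-up PIDs:

import com.google.common.collect.Lists;

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.LongStream;

// Illustrative only: reproduces the chunked-delete arithmetic, not the actual DAO calls.
public class ChunkedDeleteSketch {
   public static void main(String[] args) {
      int maxPerPass = 20000;       // DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS
      int maxPerStatement = 500;    // DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT

      // Pretend the search has 1,234 stored result PIDs (they all fit in one pass)
      List<Long> resultPids = LongStream.rangeClosed(1, 1234).boxed().collect(Collectors.toList());
      List<Long> onePass = resultPids.subList(0, Math.min(maxPerPass, resultPids.size()));

      // 1234 PIDs split into IN-clause sized chunks => 3 DELETE ... WHERE pid IN (...) statements
      List<List<Long>> statements = Lists.partition(onePass, maxPerStatement);
      System.out.println("DELETE statements needed: " + statements.size()); // 3

      // Only when a pass returns fewer rows than the pass limit is the parent Search row deleted
      boolean searchRowCanBeDeleted = onePass.size() < maxPerPass;
      System.out.println("Safe to delete Search row: " + searchRowCanBeDeleted); // true
   }
}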
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java (new file)
@@ -0,0 +1,98 @@
package ca.uhn.fhir.jpa.search.cache;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2019 University Health Network
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
|
||||
import ca.uhn.fhir.jpa.entity.Search;
|
||||
import ca.uhn.fhir.jpa.entity.SearchResult;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
|
||||
import javax.transaction.Transactional;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl.toPage;
|
||||
|
||||
public class DatabaseSearchResultCacheSvcImpl implements ISearchResultCacheSvc {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(DatabaseSearchResultCacheSvcImpl.class);
|
||||
|
||||
@Autowired
|
||||
private ISearchResultDao mySearchResultDao;
|
||||
|
||||
@Override
|
||||
@Transactional(Transactional.TxType.REQUIRED)
|
||||
public List<Long> fetchResultPids(Search theSearch, int theFrom, int theTo) {
|
||||
final Pageable page = toPage(theFrom, theTo);
|
||||
if (page == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<Long> retVal = mySearchResultDao
|
||||
.findWithSearchPid(theSearch.getId(), page)
|
||||
.getContent();
|
||||
|
||||
ourLog.trace("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size());
|
||||
|
||||
return new ArrayList<>(retVal);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(Transactional.TxType.REQUIRED)
|
||||
public List<Long> fetchAllResultPids(Search theSearch) {
|
||||
List<Long> retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId());
|
||||
ourLog.trace("fetchAllResultPids returned {} pids", retVal.size());
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(Transactional.TxType.REQUIRED)
|
||||
public void storeResults(Search theSearch, List<Long> thePreviouslyStoredResourcePids, List<Long> theNewResourcePids) {
|
||||
List<SearchResult> resultsToSave = Lists.newArrayList();
|
||||
|
||||
ourLog.trace("Storing {} results with {} previous for search", theNewResourcePids.size(), thePreviouslyStoredResourcePids.size());
|
||||
|
||||
int order = thePreviouslyStoredResourcePids.size();
|
||||
for (Long nextPid : theNewResourcePids) {
|
||||
SearchResult nextResult = new SearchResult(theSearch);
|
||||
nextResult.setResourcePid(nextPid);
|
||||
nextResult.setOrder(order);
|
||||
resultsToSave.add(nextResult);
|
||||
ourLog.trace("Saving ORDER[{}] Resource {}", order, nextResult.getResourcePid());
|
||||
|
||||
order++;
|
||||
}
|
||||
|
||||
mySearchResultDao.saveAll(resultsToSave);
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
void setSearchDaoResultForUnitTest(ISearchResultDao theSearchResultDao) {
|
||||
mySearchResultDao = theSearchResultDao;
|
||||
}
|
||||
|
||||
|
||||
}
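storeResults() above keeps the ORDER column contiguous across incremental saves by starting each new batch at the size of what was previously stored. A small standalone illustration of that offset rule, in plain Java with invented PIDs:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class StoreResultsOrderSketch {

   public static void main(String[] args) {
      List<Long> stored = new ArrayList<>();

      // First page: orders 0..2
      appendBatch(stored, Arrays.asList(101L, 102L, 103L));
      // Second page: orders continue at 3..4 because the offset is stored.size()
      appendBatch(stored, Arrays.asList(104L, 105L));
   }

   private static void appendBatch(List<Long> thePreviouslyStored, List<Long> theNewPids) {
      int order = thePreviouslyStored.size(); // same offset rule as storeResults() above
      for (Long pid : theNewPids) {
         System.out.println("ORDER[" + order + "] -> resource PID " + pid);
         order++;
      }
      thePreviouslyStored.addAll(theNewPids);
   }
}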
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchCacheSvc.java (new file)
@@ -0,0 +1,101 @@
package ca.uhn.fhir.jpa.search.cache;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2019 University Health Network
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.Search;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Date;
|
||||
import java.util.Optional;
|
||||
|
||||
public interface ISearchCacheSvc {
|
||||
|
||||
/**
|
||||
* Places a new search of some sort in the cache, or updates an existing search. The search passed in is guaranteed to have
|
||||
* a {@link Search#getUuid() UUID} so the UUID is a good candidate for consistent identification.
|
||||
*
|
||||
* @param theSearch The search to store
|
||||
* @return Returns a copy of the search as it was saved. Callers should use the returned Search object for any further processing.
|
||||
*/
|
||||
Search save(Search theSearch);
|
||||
|
||||
/**
|
||||
* Fetch a search using its UUID. The search should be fully loaded when it is returned (i.e. includes are fetched, so that access to its
|
||||
* fields will not cause database errors if the current transaction scope ends).
|
||||
*
|
||||
* @param theUuid The search UUID
|
||||
* @return The search if it exists
|
||||
*/
|
||||
Optional<Search> fetchByUuid(String theUuid);
|
||||
|
||||
/**
|
||||
* TODO: this is perhaps an inappropriate responsibility for this service
|
||||
*
|
||||
* <p>
|
||||
* This method marks a search as in progress, but should only allow exactly one call to do so across the cluster. This
|
||||
* is done so that if two client threads request the next page at the exact same time (which is unlikely but not
|
||||
* impossible) only one will actually proceed to load the next results and the other will just wait for them
|
||||
* to arrive.
|
||||
*
|
||||
* @param theSearch The search to mark
|
||||
* @return This method should return an empty optional if the search was not marked (meaning that another thread
|
||||
* succeeded in marking it). If the search doesn't exist or some other error occurred, an exception will be thrown
|
||||
* instead of {@link Optional#empty()}
|
||||
*/
|
||||
Optional<Search> tryToMarkSearchAsInProgress(Search theSearch);
|
||||
|
||||
/**
|
||||
* Look for any existing searches matching the given resource type and query string.
|
||||
* <p>
|
||||
* This method is allowed to perform a "best effort" return, so it can return searches that don't match the query string exactly, or
|
||||
* which have a created timestamp before <code>theCreatedAfter</code> date. The caller is responsible for removing
|
||||
* any inappropriate Searches and picking the most relevant one.
|
||||
* </p>
|
||||
*
|
||||
* @param theResourceType The resource type of the search. Results MUST match this type
|
||||
* @param theQueryString The query string. Results SHOULD match this query string
|
||||
* @param theQueryStringHash The query string hash. Results SHOULD match this hash
|
||||
* @param theCreatedAfter Results SHOULD not include any searches created before this cutoff timestamp
|
||||
* @return A collection of candidate searches
|
||||
*/
|
||||
Collection<Search> findCandidatesForReuse(String theResourceType, String theQueryString, int theQueryStringHash, Date theCreatedAfter);
|
||||
|
||||
/**
|
||||
* Mark a search as having been "last used" at the given time. This method may (and probably should) be implemented
|
||||
* to work asynchronously in order to avoid hammering the database if the search gets reused many times.
|
||||
*
|
||||
* @param theSearch The search
|
||||
* @param theDate The "last returned" timestamp
|
||||
*/
|
||||
void updateSearchLastReturned(Search theSearch, Date theDate);
|
||||
|
||||
/**
|
||||
* This is mostly public for unit tests
|
||||
*/
|
||||
void flushLastUpdated();
|
||||
|
||||
/**
|
||||
* This method will be called periodically to delete stale searches. Implementations are not required to do anything
|
||||
* if they have some other mechanism for expiring stale results other than manually looking for them
|
||||
* and deleting them.
|
||||
*/
|
||||
void pollForStaleSearchesAndDeleteThem();
|
||||
}
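Because findCandidatesForReuse() is explicitly best-effort and tryToMarkSearchAsInProgress() is the cluster-wide claim step, a caller is expected to re-filter the candidates itself and to treat an empty Optional as "another node is already loading". The sketch below shows one plausible caller under those assumptions; it is not the actual search coordinator, and the getSearchQueryString()/getCreated() accessors on Search are assumed to exist:

import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;

import java.util.Collection;
import java.util.Date;
import java.util.Optional;

// Illustrative caller only; hypothetical class, not part of this commit.
public class SearchReuseSketch {

   private final ISearchCacheSvc mySearchCacheSvc;

   public SearchReuseSketch(ISearchCacheSvc theSearchCacheSvc) {
      mySearchCacheSvc = theSearchCacheSvc;
   }

   Optional<Search> findReusableSearch(String theResourceType, String theQueryString, Date theCreatedAfter) {
      Collection<Search> candidates = mySearchCacheSvc.findCandidatesForReuse(
         theResourceType, theQueryString, theQueryString.hashCode(), theCreatedAfter);

      // The contract above is "best effort", so the caller re-checks the query string and cutoff itself
      return candidates.stream()
         .filter(s -> theQueryString.equals(s.getSearchQueryString())) // assumed accessor for the stored query string
         .filter(s -> s.getCreated() != null && s.getCreated().after(theCreatedAfter))
         .findFirst();
   }

   Optional<Search> claimForLoading(Search theCandidate) {
      if (theCandidate.getStatus() != SearchStatusEnum.PASSCMPLET) {
         return Optional.empty();
      }
      // Empty means another thread or node won the race and is already loading the next page
      return mySearchCacheSvc.tryToMarkSearchAsInProgress(theCandidate);
   }
}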
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java (new file)
@@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.search.cache;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2019 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.entity.Search;

import java.util.List;

public interface ISearchResultCacheSvc {
   /**
    * @param theSearch                       The search - This method is not required to persist any changes to the Search object, it is only provided here for identification
    * @param thePreviouslyStoredResourcePids A list of resource PIDs that have previously been saved to this search
    * @param theNewResourcePids              A list of new resource PIDs to add to this search (these ones have not been previously saved)
    */
   void storeResults(Search theSearch, List<Long> thePreviouslyStoredResourcePids, List<Long> theNewResourcePids);

   /**
    * Fetch a subset of the search result IDs from the cache
    *
    * @param theSearch The search to fetch IDs for
    * @param theFrom   The starting index (inclusive)
    * @param theTo     The ending index (exclusive)
    * @return A list of resource PIDs
    */
   List<Long> fetchResultPids(Search theSearch, int theFrom, int theTo);

   /**
    * Fetch all result PIDs for a given search with no particular order required
    *
    * @param theSearch The search to fetch result PIDs for
    * @return All resource PIDs stored for the search, in no particular order
    */
   List<Long> fetchAllResultPids(Search theSearch);
}
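fetchResultPids() takes a half-open [from, to) range, so a page-oriented caller just multiplies out the page index. A minimal sketch under that assumption; the page size and wiring are invented:

import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;

import java.util.List;

// Hypothetical helper, shown only to illustrate the index convention.
public class ResultPageSketch {

   private final ISearchResultCacheSvc myResultCacheSvc;

   public ResultPageSketch(ISearchResultCacheSvc theResultCacheSvc) {
      myResultCacheSvc = theResultCacheSvc;
   }

   List<Long> fetchPage(Search theSearch, int thePageIndex, int thePageSize) {
      int from = thePageIndex * thePageSize;   // inclusive
      int to = from + thePageSize;             // exclusive
      // e.g. page 2 with a page size of 20 asks for result indexes 40..59
      return myResultCacheSvc.fetchResultPids(theSearch, from, to);
   }
}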
@ -62,7 +62,6 @@ import org.hl7.fhir.instance.model.api.IIdType;
|
|||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
import org.hl7.fhir.r4.model.*;
|
||||
import org.hl7.fhir.r4.model.codesystems.ConceptSubsumptionOutcome;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
|
@ -88,6 +87,7 @@ import javax.persistence.PersistenceContext;
|
|||
import javax.persistence.PersistenceContextType;
|
||||
import javax.persistence.TypedQuery;
|
||||
import javax.persistence.criteria.*;
|
||||
import javax.validation.constraints.NotNull;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
|
|
@ -26,7 +26,9 @@ import org.hl7.fhir.instance.hapi.validation.IValidationSupport;
|
|||
import org.hl7.fhir.instance.model.api.IBaseDatatype;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.*;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
|
|
@ -6,7 +6,9 @@ import ca.uhn.fhir.jpa.entity.*;
|
|||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import org.hl7.fhir.instance.model.api.*;
|
||||
import org.hl7.fhir.r4.model.*;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.List;
|
||||
|
|
|
@ -34,9 +34,9 @@ import org.hl7.fhir.r4.model.CodeSystem;
|
|||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import javax.validation.constraints.NotNull;
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
|
|
|
@ -5,23 +5,25 @@ import ca.uhn.fhir.interceptor.api.IInterceptorService;
|
|||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.interceptor.executor.InterceptorService;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
|
||||
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
|
||||
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
|
||||
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
|
||||
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
|
||||
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
|
||||
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.term.VersionIndependentConcept;
|
||||
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
|
||||
import ca.uhn.fhir.jpa.util.ExpungeOptions;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.test.utilities.LoggingRule;
|
||||
import ca.uhn.fhir.model.dstu2.resource.Bundle;
|
||||
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.test.utilities.LoggingRule;
|
||||
import ca.uhn.fhir.util.BundleUtil;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
|
@ -91,6 +93,10 @@ public abstract class BaseJpaTest {
|
|||
protected IInterceptorService myInterceptorRegistry;
|
||||
@Autowired
|
||||
protected CircularQueueCaptureQueriesListener myCaptureQueriesListener;
|
||||
@Autowired
|
||||
protected ISearchResultCacheSvc mySearchResultCacheSvc;
|
||||
@Autowired
|
||||
protected ISearchCacheSvc mySearchCacheSvc;
|
||||
|
||||
@After
|
||||
public void afterPerformCleanup() {
|
||||
|
|
|
@ -206,8 +206,6 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
|
|||
@Autowired(required = false)
|
||||
protected IFulltextSearchSvc mySearchDao;
|
||||
@Autowired
|
||||
protected ISearchDao mySearchEntityDao;
|
||||
@Autowired
|
||||
@Qualifier("mySearchParameterDaoDstu3")
|
||||
protected IFhirResourceDao<SearchParameter> mySearchParameterDao;
|
||||
@Autowired
|
||||
|
|
|
ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchPageExpiryTest.java (file deleted by this commit)
@@ -1,401 +0,0 @@
|
|||
package ca.uhn.fhir.jpa.dao.dstu3;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
|
||||
import ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4SearchPageExpiryTest;
|
||||
import ca.uhn.fhir.jpa.entity.Search;
|
||||
import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
|
||||
import ca.uhn.fhir.jpa.util.TestUtil;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.param.StringParam;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.hl7.fhir.dstu3.model.InstantType;
|
||||
import org.hl7.fhir.dstu3.model.Patient;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.test.util.AopTestUtils;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class FhirResourceDaoDstu3SearchPageExpiryTest extends BaseJpaDstu3Test {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoDstu3SearchPageExpiryTest.class);
|
||||
|
||||
@After()
|
||||
public void after() {
|
||||
StaleSearchDeletingSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(myStaleSearchDeletingSvc);
|
||||
staleSearchDeletingSvc.setCutoffSlackForUnitTest(StaleSearchDeletingSvcImpl.DEFAULT_CUTOFF_SLACK);
|
||||
StaleSearchDeletingSvcImpl.setNowForUnitTests(null);
|
||||
|
||||
myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
|
||||
myDaoConfig.setExpireSearchResults(new DaoConfig().isExpireSearchResults());
|
||||
}
|
||||
|
||||
@Before
|
||||
public void before() {
|
||||
myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
|
||||
myDaoConfig.setExpireSearchResults(new DaoConfig().isExpireSearchResults());
|
||||
myDaoConfig.setCountSearchResultsUpTo(null);
|
||||
|
||||
StaleSearchDeletingSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(myStaleSearchDeletingSvc);
|
||||
staleSearchDeletingSvc.setCutoffSlackForUnitTest(0);
|
||||
}
|
||||
|
||||
@Before
|
||||
public void beforeDisableResultReuse() {
|
||||
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExpirePagesAfterReuse() throws Exception {
|
||||
IIdType pid1;
|
||||
IIdType pid2;
|
||||
{
|
||||
Patient patient = new Patient();
|
||||
patient.addName().setFamily("EXPIRE");
|
||||
pid1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
|
||||
}
|
||||
Thread.sleep(10);
|
||||
{
|
||||
Patient patient = new Patient();
|
||||
patient.addName().setFamily("EXPIRE");
|
||||
pid2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
|
||||
}
|
||||
Thread.sleep(10);
|
||||
|
||||
myDaoConfig.setExpireSearchResultsAfterMillis(1000L);
|
||||
myDaoConfig.setReuseCachedSearchResultsForMillis(500L);
|
||||
long start = System.currentTimeMillis();
|
||||
|
||||
final String searchUuid1;
|
||||
{
|
||||
SearchParameterMap params = new SearchParameterMap();
|
||||
params.add(Patient.SP_FAMILY, new StringParam("EXPIRE"));
|
||||
final IBundleProvider bundleProvider = myPatientDao.search(params);
|
||||
assertThat(toUnqualifiedVersionlessIds(bundleProvider), containsInAnyOrder(pid1, pid2));
|
||||
searchUuid1 = bundleProvider.getUuid();
|
||||
Validate.notBlank(searchUuid1);
|
||||
}
|
||||
|
||||
waitForSearchToSave(searchUuid1);
|
||||
|
||||
final String searchUuid2;
|
||||
{
|
||||
SearchParameterMap params = new SearchParameterMap();
|
||||
params.add(Patient.SP_FAMILY, new StringParam("EXPIRE"));
|
||||
final IBundleProvider bundleProvider = myPatientDao.search(params);
|
||||
assertThat(toUnqualifiedVersionlessIds(bundleProvider), containsInAnyOrder(pid1, pid2));
|
||||
searchUuid2 = bundleProvider.getUuid();
|
||||
Validate.notBlank(searchUuid2);
|
||||
}
|
||||
assertEquals(searchUuid1, searchUuid2);
|
||||
|
||||
TestUtil.sleepAtLeast(501);
|
||||
// We're now past 500ms so we shouldn't reuse the search
|
||||
|
||||
final String searchUuid3;
|
||||
{
|
||||
SearchParameterMap params = new SearchParameterMap();
|
||||
params.add(Patient.SP_FAMILY, new StringParam("EXPIRE"));
|
||||
final IBundleProvider bundleProvider = myPatientDao.search(params);
|
||||
assertThat(toUnqualifiedVersionlessIds(bundleProvider), containsInAnyOrder(pid1, pid2));
|
||||
searchUuid3 = bundleProvider.getUuid();
|
||||
Validate.notBlank(searchUuid3);
|
||||
}
|
||||
assertNotEquals(searchUuid1, searchUuid3);
|
||||
|
||||
// Search just got used so it shouldn't be deleted
|
||||
|
||||
StaleSearchDeletingSvcImpl.setNowForUnitTests(start + 500);
|
||||
final AtomicLong search3timestamp = new AtomicLong();
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
Search search3 = mySearchEntityDao.findByUuid(searchUuid3);
|
||||
assertNotNull(search3);
|
||||
Search search2 = mySearchEntityDao.findByUuid(searchUuid2);
|
||||
assertNotNull(search2);
|
||||
search3timestamp.set(search2.getSearchLastReturned().getTime());
|
||||
}
|
||||
});
|
||||
|
||||
StaleSearchDeletingSvcImpl.setNowForUnitTests(search3timestamp.get() + 800);
|
||||
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
assertNotNull(mySearchEntityDao.findByUuid(searchUuid3));
|
||||
}
|
||||
});
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
assertNotNull(mySearchEntityDao.findByUuid(searchUuid1));
|
||||
}
|
||||
});
|
||||
|
||||
StaleSearchDeletingSvcImpl.setNowForUnitTests(search3timestamp.get() + 1100);
|
||||
|
||||
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
assertNull("Search 1 still exists", mySearchEntityDao.findByUuid(searchUuid1));
|
||||
assertNotNull("Search 3 still exists", mySearchEntityDao.findByUuid(searchUuid3));
|
||||
}
|
||||
});
|
||||
|
||||
StaleSearchDeletingSvcImpl.setNowForUnitTests(search3timestamp.get() + 2100);
|
||||
|
||||
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
assertNull("Search 1 still exists", mySearchEntityDao.findByUuid(searchUuid1));
|
||||
assertNull("Search 3 still exists", mySearchEntityDao.findByUuid(searchUuid3));
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExpirePagesAfterSingleUse() throws Exception {
|
||||
IIdType pid1;
|
||||
IIdType pid2;
|
||||
{
|
||||
Patient patient = new Patient();
|
||||
patient.addName().setFamily("EXPIRE");
|
||||
pid1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
|
||||
}
|
||||
Thread.sleep(10);
|
||||
{
|
||||
Patient patient = new Patient();
|
||||
patient.addName().setFamily("EXPIRE");
|
||||
pid2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
|
||||
}
|
||||
Thread.sleep(10);
|
||||
|
||||
final StopWatch sw = new StopWatch();
|
||||
|
||||
SearchParameterMap params;
|
||||
params = new SearchParameterMap();
|
||||
params.add(Patient.SP_FAMILY, new StringParam("EXPIRE"));
|
||||
final IBundleProvider bundleProvider = myPatientDao.search(params);
|
||||
assertThat(toUnqualifiedVersionlessIds(bundleProvider), containsInAnyOrder(pid1, pid2));
|
||||
assertThat(toUnqualifiedVersionlessIds(bundleProvider), containsInAnyOrder(pid1, pid2));
|
||||
|
||||
waitForSearchToSave(bundleProvider.getUuid());
|
||||
final AtomicLong start = new AtomicLong();
|
||||
|
||||
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
|
||||
txTemplate.execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
Search search = mySearchEntityDao.findByUuid(bundleProvider.getUuid());
|
||||
assertNotNull("Failed after " + sw.toString(), search);
|
||||
start.set(search.getCreated().getTime());
|
||||
ourLog.info("Search was created: {}", new InstantType(new Date(start.get())));
|
||||
}
|
||||
});
|
||||
|
||||
myDaoConfig.setExpireSearchResultsAfterMillis(500);
|
||||
myDaoConfig.setReuseCachedSearchResultsForMillis(500L);
|
||||
StaleSearchDeletingSvcImpl.setNowForUnitTests(start.get() + 499);
|
||||
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
|
||||
txTemplate.execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
assertNotNull(mySearchEntityDao.findByUuid(bundleProvider.getUuid()));
|
||||
}
|
||||
});
|
||||
|
||||
StaleSearchDeletingSvcImpl.setNowForUnitTests(start.get() + 600);
|
||||
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
|
||||
txTemplate.execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
assertNull(mySearchEntityDao.findByUuid(bundleProvider.getUuid()));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExpirePagesAfterSingleUse2() throws Exception {
|
||||
IIdType pid1;
|
||||
IIdType pid2;
|
||||
{
|
||||
Patient patient = new Patient();
|
||||
patient.addName().setFamily("EXPIRE");
|
||||
pid1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
|
||||
}
|
||||
Thread.sleep(10);
|
||||
{
|
||||
Patient patient = new Patient();
|
||||
patient.addName().setFamily("EXPIRE");
|
||||
pid2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
|
||||
}
|
||||
Thread.sleep(10);
|
||||
|
||||
myDaoConfig.setExpireSearchResultsAfterMillis(1000L);
|
||||
myDaoConfig.setReuseCachedSearchResultsForMillis(500L);
|
||||
long start = System.currentTimeMillis();
|
||||
|
||||
final String searchUuid1;
|
||||
{
|
||||
SearchParameterMap params = new SearchParameterMap();
|
||||
params.add(Patient.SP_FAMILY, new StringParam("EXPIRE"));
|
||||
final IBundleProvider bundleProvider = myPatientDao.search(params);
|
||||
assertThat(toUnqualifiedVersionlessIds(bundleProvider), containsInAnyOrder(pid1, pid2));
|
||||
searchUuid1 = bundleProvider.getUuid();
|
||||
Validate.notBlank(searchUuid1);
|
||||
}
|
||||
|
||||
String searchUuid2;
|
||||
{
|
||||
SearchParameterMap params = new SearchParameterMap();
|
||||
params.add(Patient.SP_FAMILY, new StringParam("EXPIRE"));
|
||||
final IBundleProvider bundleProvider = myPatientDao.search(params);
|
||||
assertThat(toUnqualifiedVersionlessIds(bundleProvider), containsInAnyOrder(pid1, pid2));
|
||||
searchUuid2 = bundleProvider.getUuid();
|
||||
Validate.notBlank(searchUuid2);
|
||||
}
|
||||
assertEquals(searchUuid1, searchUuid2);
|
||||
|
||||
TestUtil.sleepAtLeast(501);
|
||||
|
||||
// We're now past 500ms so we shouldn't reuse the search
|
||||
|
||||
final String searchUuid3;
|
||||
{
|
||||
SearchParameterMap params = new SearchParameterMap();
|
||||
params.add(Patient.SP_FAMILY, new StringParam("EXPIRE"));
|
||||
final IBundleProvider bundleProvider = myPatientDao.search(params);
|
||||
assertThat(toUnqualifiedVersionlessIds(bundleProvider), containsInAnyOrder(pid1, pid2));
|
||||
searchUuid3 = bundleProvider.getUuid();
|
||||
Validate.notBlank(searchUuid3);
|
||||
}
|
||||
assertNotEquals(searchUuid1, searchUuid3);
|
||||
|
||||
// Search just got used so it shouldn't be deleted
|
||||
|
||||
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
|
||||
final AtomicLong search3timestamp = new AtomicLong();
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
Search search3 = mySearchEntityDao.findByUuid(searchUuid3);
|
||||
assertNotNull(search3);
|
||||
search3timestamp.set(search3.getCreated().getTime());
|
||||
}
|
||||
});
|
||||
|
||||
StaleSearchDeletingSvcImpl.setNowForUnitTests(search3timestamp.get() + 800);
|
||||
|
||||
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
assertNotNull(mySearchEntityDao.findByUuid(searchUuid3));
|
||||
}
|
||||
});
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
assertNull(mySearchEntityDao.findByUuid(searchUuid1));
|
||||
}
|
||||
});
|
||||
|
||||
StaleSearchDeletingSvcImpl.setNowForUnitTests(search3timestamp.get() + 1100);
|
||||
|
||||
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
assertNull("Search 1 still exists", mySearchEntityDao.findByUuid(searchUuid1));
|
||||
assertNull("Search 3 still exists", mySearchEntityDao.findByUuid(searchUuid3));
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSearchPagesExpiryDisabled() throws Exception {
|
||||
{
|
||||
Patient patient = new Patient();
|
||||
patient.addName().setFamily("EXPIRE");
|
||||
myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
|
||||
}
|
||||
{
|
||||
Patient patient = new Patient();
|
||||
patient.addName().setFamily("EXPIRE");
|
||||
myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
|
||||
}
|
||||
|
||||
SearchParameterMap params;
|
||||
params = new SearchParameterMap();
|
||||
params.add(Patient.SP_FAMILY, new StringParam("EXPIRE"));
|
||||
params.setCount(1);
|
||||
final IBundleProvider bundleProvider = myPatientDao.search(params);
|
||||
|
||||
Search search = null;
|
||||
for (int i = 0; i < 100 && search == null; i++) {
|
||||
search = newTxTemplate().execute(new TransactionCallback<Search>() {
|
||||
@Nullable
|
||||
@Override
|
||||
public Search doInTransaction(TransactionStatus status) {
|
||||
return mySearchEntityDao.findByUuid(bundleProvider.getUuid());
|
||||
}
|
||||
});
|
||||
if (search == null) {
|
||||
TestUtil.sleepAtLeast(100);
|
||||
}
|
||||
}
|
||||
assertNotNull("Search " + bundleProvider.getUuid() + " not found on disk after 10 seconds", search);
|
||||
|
||||
|
||||
myDaoConfig.setExpireSearchResults(false);
|
||||
StaleSearchDeletingSvcImpl.setNowForUnitTests(System.currentTimeMillis() + DateUtils.MILLIS_PER_DAY);
|
||||
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
|
||||
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
Search search = mySearchEntityDao.findByUuid(bundleProvider.getUuid());
|
||||
assertNotNull(search);
|
||||
}
|
||||
});
|
||||
|
||||
myDaoConfig.setExpireSearchResults(true);
|
||||
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
|
||||
|
||||
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
|
||||
@Override
|
||||
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
|
||||
Search search = mySearchEntityDao.findByUuid(bundleProvider.getUuid());
|
||||
assertNull(search);
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
private void waitForSearchToSave(final String theUuid) {
|
||||
final ISearchDao searchEntityDao = mySearchEntityDao;
|
||||
TransactionTemplate txTemplate = newTxTemplate();
|
||||
FhirResourceDaoR4SearchPageExpiryTest.waitForSearchToSave(theUuid, searchEntityDao, txTemplate);
|
||||
}
|
||||
}
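The page-expiry tests (the DSTU3 copy deleted above and the R4 copy updated later in this diff) no longer sleep their way past the expiry window; they move the cache's notion of "now" with DatabaseSearchCacheSvcImpl.setNowForUnitTests(..) and then poll for stale searches. A standalone sketch of the cutoff arithmetic those tests exercise, with the constants taken from the test setup and everything else illustrative:

import java.util.Date;

public class ExpiryCutoffSketch {
   public static void main(String[] args) {
      long expireAfterMillis = 1000L;        // DaoConfig.setExpireSearchResultsAfterMillis(1000L) in the tests
      Long reuseForMillis = 500L;            // DaoConfig.setReuseCachedSearchResultsForMillis(500L)
      long cutoffSlack = 0;                  // the tests call setCutoffSlackForUnitTest(0)

      long now = System.currentTimeMillis(); // the tests replace this via setNowForUnitTests(..)

      // Same max() rule as pollForStaleSearchesAndDeleteThem(): keep a search at least as long
      // as it is eligible for reuse
      long cutoffMillis = expireAfterMillis;
      if (reuseForMillis != null) {
         cutoffMillis = Math.max(cutoffMillis, reuseForMillis);
      }
      Date cutoff = new Date(now - cutoffMillis - cutoffSlack);

      // Any Search whose "last returned" timestamp is before this cutoff is eligible for deletion
      System.out.println("Searches last returned before " + cutoff + " will be purged");
   }
}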
|
|
@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao.r4;
|
|||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorService;
|
||||
import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
|
||||
import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
|
||||
import ca.uhn.fhir.jpa.config.TestR4Config;
|
||||
import ca.uhn.fhir.jpa.dao.*;
|
||||
|
@ -11,7 +12,6 @@ import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
|
|||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
|
||||
import ca.uhn.fhir.jpa.provider.r4.JpaSystemProviderR4;
|
||||
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
|
||||
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
|
||||
|
@ -21,7 +21,6 @@ import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
|
|||
import ca.uhn.fhir.jpa.searchparam.registry.BaseSearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.subscription.module.cache.SubscriptionRegistry;
|
||||
import ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
|
||||
import ca.uhn.fhir.jpa.term.IHapiTerminologySvcR4;
|
||||
import ca.uhn.fhir.jpa.util.ResourceCountCache;
|
||||
import ca.uhn.fhir.jpa.util.ResourceProviderFactory;
|
||||
|
@ -57,7 +56,6 @@ import org.springframework.beans.factory.annotation.Autowired;
|
|||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.TestPropertySource;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.test.util.AopTestUtils;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
@ -251,12 +249,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
|
|||
@Autowired(required = false)
|
||||
protected IFulltextSearchSvc mySearchDao;
|
||||
@Autowired
|
||||
protected ISearchDao mySearchEntityDao;
|
||||
@Autowired
|
||||
protected ISearchResultDao mySearchResultDao;
|
||||
@Autowired
|
||||
protected ISearchIncludeDao mySearchIncludeDao;
|
||||
@Autowired
|
||||
protected IResourceReindexJobDao myResourceReindexJobDao;
|
||||
@Autowired
|
||||
@Qualifier("mySearchParameterDaoR4")
|
||||
|
|
|
@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao.r4;
|
|||
|
||||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||
import ca.uhn.fhir.jpa.dao.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
|
||||
import ca.uhn.fhir.jpa.model.entity.*;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
|
@ -55,6 +56,8 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
|
||||
@Autowired
|
||||
MatchUrlService myMatchUrlService;
|
||||
@Autowired
|
||||
private ISearchDao mySearchEntityDao;
|
||||
|
||||
@After
|
||||
public void afterResetSearchSize() {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
|
||||
import ca.uhn.fhir.jpa.dao.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
|
||||
import ca.uhn.fhir.jpa.model.entity.*;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap.EverythingModeEnum;
|
||||
|
@ -30,6 +30,7 @@ import org.hl7.fhir.r4.model.Observation.ObservationStatus;
|
|||
import org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType;
|
||||
import org.hl7.fhir.r4.model.Subscription.SubscriptionStatus;
|
||||
import org.junit.*;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
|
||||
|
@ -49,6 +50,8 @@ import static org.mockito.Mockito.mock;
|
|||
@SuppressWarnings({"unchecked", "Duplicates"})
|
||||
public class FhirResourceDaoR4SearchNoHashesTest extends BaseJpaR4Test {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4SearchNoHashesTest.class);
|
||||
@Autowired
|
||||
private ISearchDao mySearchEntityDao;
|
||||
|
||||
@After
|
||||
public void afterResetSearchSize() {
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
|
||||
import ca.uhn.fhir.jpa.entity.Search;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
|
||||
|
@ -14,6 +16,7 @@ import ca.uhn.fhir.rest.param.ReferenceOrListParam;
|
|||
import ca.uhn.fhir.rest.param.ReferenceParam;
|
||||
import ca.uhn.fhir.rest.param.StringParam;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.hl7.fhir.instance.model.api.IAnyResource;
|
||||
|
@ -24,6 +27,7 @@ import org.junit.AfterClass;
|
|||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.springframework.aop.framework.AopProxyUtils;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.scheduling.concurrent.ThreadPoolExecutorFactoryBean;
|
||||
import org.springframework.test.context.TestPropertySource;
|
||||
|
||||
|
@ -48,6 +52,10 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4SearchOptimizedTest.class);
|
||||
private SearchCoordinatorSvcImpl mySearchCoordinatorSvcImpl;
|
||||
@Autowired
|
||||
private ISearchDao mySearchEntityDao;
|
||||
@Autowired
|
||||
private ISearchResultDao mySearchResultDao;
|
||||
|
||||
@Before
|
||||
public void before() {
|
||||
|
@ -288,7 +296,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
*/
|
||||
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(200, search.getNumFound());
|
||||
assertEquals(search.getNumFound(), mySearchResultDao.count());
|
||||
assertNull(search.getTotalCount());
|
||||
|
@ -307,7 +315,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
* Search gets incremented twice as a part of loading the next batch
|
||||
*/
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(SearchStatusEnum.FINISHED, search.getStatus());
|
||||
assertEquals(200, search.getNumFound());
|
||||
assertEquals(search.getNumFound(), mySearchResultDao.count());
|
||||
|
@ -343,7 +351,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
*/
|
||||
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(20, search.getNumFound());
|
||||
assertEquals(search.getNumFound(), mySearchResultDao.count());
|
||||
assertNull(search.getTotalCount());
|
||||
|
@ -367,7 +375,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
* Search should be untouched
|
||||
*/
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(1, search.getVersion().intValue());
|
||||
});
|
||||
|
||||
|
@ -383,7 +391,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
* Search gets incremented twice as a part of loading the next batch
|
||||
*/
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(SearchStatusEnum.PASSCMPLET, search.getStatus());
|
||||
assertEquals(50, search.getNumFound());
|
||||
assertEquals(search.getNumFound(), mySearchResultDao.count());
|
||||
|
@ -407,7 +415,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
* Search should be untouched
|
||||
*/
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(3, search.getVersion().intValue());
|
||||
});
|
||||
|
||||
|
@ -423,7 +431,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
* Search gets incremented twice as a part of loading the next batch
|
||||
*/
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(190, search.getNumFound());
|
||||
assertEquals(search.getNumFound(), mySearchResultDao.count());
|
||||
assertEquals(190, search.getTotalCount().intValue());
|
||||
|
@ -470,7 +478,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
*/
|
||||
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(50, search.getNumFound());
|
||||
assertEquals(search.getNumFound(), mySearchResultDao.count());
|
||||
assertEquals(null, search.getTotalCount());
|
||||
|
@ -505,7 +513,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
*/
|
||||
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(20, search.getNumFound());
|
||||
assertEquals(search.getNumFound(), mySearchResultDao.count());
|
||||
assertNull(search.getTotalCount());
|
||||
|
@ -529,7 +537,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
* Search should be untouched
|
||||
*/
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(200, search.getNumFound());
|
||||
assertEquals(search.getNumFound(), mySearchResultDao.count());
|
||||
assertEquals(200, search.getTotalCount().intValue());
|
||||
|
@ -562,9 +570,9 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
* 20 should be prefetched since that's the initial page size
|
||||
*/
|
||||
|
||||
waitForSize(20, () -> runInTransaction(() -> mySearchEntityDao.findByUuid(uuid).getNumFound()));
|
||||
waitForSize(20, () -> runInTransaction(() -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")).getNumFound()));
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(20, search.getNumFound());
|
||||
assertEquals(search.getNumFound(), mySearchResultDao.count());
|
||||
assertNull(search.getTotalCount());
|
||||
|
@ -625,7 +633,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
assertEquals(1, ids.size());
|
||||
|
||||
runInTransaction(() -> {
|
||||
Search search = mySearchEntityDao.findByUuid(uuid);
|
||||
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
|
||||
assertEquals(SearchStatusEnum.FINISHED, search.getStatus());
|
||||
assertEquals(1, search.getNumFound());
|
||||
assertEquals(search.getNumFound(), mySearchResultDao.count());
|
||||
|
|
|
@@ -1,15 +1,16 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.StopWatch;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -20,6 +21,7 @@ import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.util.AopTestUtils;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
@@ -30,6 +32,7 @@ import javax.annotation.Nullable;
import java.util.Date;
import java.util.concurrent.atomic.AtomicLong;
import static ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl.DEFAULT_CUTOFF_SLACK;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.*;
@@ -37,16 +40,19 @@ import static org.junit.Assert.*;
public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4SearchPageExpiryTest.class);
@Autowired
private ISearchDao mySearchEntityDao;
@After()
public void after() {
StaleSearchDeletingSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(myStaleSearchDeletingSvc);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(StaleSearchDeletingSvcImpl.DEFAULT_CUTOFF_SLACK);
StaleSearchDeletingSvcImpl.setNowForUnitTests(null);
DatabaseSearchCacheSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(mySearchCacheSvc);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(DEFAULT_CUTOFF_SLACK);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(null);
}
@Before
public void before() {
StaleSearchDeletingSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(myStaleSearchDeletingSvc);
DatabaseSearchCacheSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(mySearchCacheSvc);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(0);
myDaoConfig.setCountSearchResultsUpTo(new DaoConfig().getCountSearchResultsUpTo());
}
@@ -77,7 +83,7 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
myDaoConfig.setExpireSearchResultsAfterMillis(1000L);
myDaoConfig.setReuseCachedSearchResultsForMillis(500L);
long start = System.currentTimeMillis();
StaleSearchDeletingSvcImpl.setNowForUnitTests(start);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start);
final String searchUuid1;
{
@@ -117,53 +123,53 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
// Search just got used so it shouldn't be deleted
StaleSearchDeletingSvcImpl.setNowForUnitTests(start + 500);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start + 500);
final AtomicLong search3timestamp = new AtomicLong();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
Search search3 = mySearchEntityDao.findByUuid(searchUuid3);
Search search3 = mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
assertNotNull(search3);
Search search2 = mySearchEntityDao.findByUuid(searchUuid2);
Search search2 = mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid2).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
assertNotNull(search2);
search3timestamp.set(search2.getSearchLastReturned().getTime());
}
});
StaleSearchDeletingSvcImpl.setNowForUnitTests(search3timestamp.get() + 800);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + 800);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNotNull(mySearchEntityDao.findByUuid(searchUuid3));
assertNotNull(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3));
}
});
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNotNull(mySearchEntityDao.findByUuid(searchUuid1));
assertNotNull(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1));
}
});
StaleSearchDeletingSvcImpl.setNowForUnitTests(search3timestamp.get() + 1100);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + 1100);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNull("Search 1 still exists", mySearchEntityDao.findByUuid(searchUuid1));
assertNotNull("Search 3 still exists", mySearchEntityDao.findByUuid(searchUuid3));
assertFalse("Search 1 still exists", mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1).isPresent());
assertTrue("Search 3 still exists", mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).isPresent());
}
});
StaleSearchDeletingSvcImpl.setNowForUnitTests(search3timestamp.get() + 2100);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + 2100);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNull("Search 1 still exists", mySearchEntityDao.findByUuid(searchUuid1));
assertNull("Search 3 still exists", mySearchEntityDao.findByUuid(searchUuid3));
assertFalse("Search 1 still exists", mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1).isPresent());
assertFalse("Search 3 still exists", mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).isPresent());
}
});
@@ -202,7 +208,7 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
txTemplate.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
Search search = mySearchEntityDao.findByUuid(bundleProvider.getUuid());
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(bundleProvider.getUuid()).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
assertNotNull("Failed after " + sw.toString(), search);
start.set(search.getCreated().getTime());
ourLog.info("Search was created: {}", new InstantType(new Date(start.get())));
@@ -211,21 +217,21 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
myDaoConfig.setExpireSearchResultsAfterMillis(500);
myDaoConfig.setReuseCachedSearchResultsForMillis(500L);
StaleSearchDeletingSvcImpl.setNowForUnitTests(start.get() + 499);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start.get() + 499);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
txTemplate.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNotNull(mySearchEntityDao.findByUuid(bundleProvider.getUuid()));
assertNotNull(mySearchEntityDao.findByUuidAndFetchIncludes(bundleProvider.getUuid()));
}
});
StaleSearchDeletingSvcImpl.setNowForUnitTests(start.get() + 600);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(start.get() + 600);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
txTemplate.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNull(mySearchEntityDao.findByUuid(bundleProvider.getUuid()));
assertFalse(mySearchEntityDao.findByUuidAndFetchIncludes(bundleProvider.getUuid()).isPresent());
}
});
}
@@ -296,36 +302,36 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
Search search3 = mySearchEntityDao.findByUuid(searchUuid3);
Search search3 = mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
assertNotNull(search3);
search3timestamp.set(search3.getCreated().getTime());
}
});
StaleSearchDeletingSvcImpl.setNowForUnitTests(search3timestamp.get() + 800);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + 800);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNotNull(mySearchEntityDao.findByUuid(searchUuid3));
assertNotNull(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3));
}
});
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNull(mySearchEntityDao.findByUuid(searchUuid1));
assertFalse(mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1).isPresent());
}
});
StaleSearchDeletingSvcImpl.setNowForUnitTests(search3timestamp.get() + 1100);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(search3timestamp.get() + 1100);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
assertNull("Search 1 still exists", mySearchEntityDao.findByUuid(searchUuid1));
assertNull("Search 3 still exists", mySearchEntityDao.findByUuid(searchUuid3));
assertFalse("Search 1 still exists", mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid1).isPresent());
assertFalse("Search 3 still exists", mySearchEntityDao.findByUuidAndFetchIncludes(searchUuid3).isPresent());
}
});
@@ -356,7 +362,7 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
@Nullable
@Override
public Search doInTransaction(TransactionStatus status) {
return mySearchEntityDao.findByUuid(bundleProvider.getUuid());
return mySearchEntityDao.findByUuidAndFetchIncludes(bundleProvider.getUuid()).orElse(null);
}
});
if (search == null) {
@@ -367,13 +373,13 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
myDaoConfig.setExpireSearchResults(false);
StaleSearchDeletingSvcImpl.setNowForUnitTests(System.currentTimeMillis() + DateUtils.MILLIS_PER_DAY);
DatabaseSearchCacheSvcImpl.setNowForUnitTests(System.currentTimeMillis() + DateUtils.MILLIS_PER_DAY);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
Search search = mySearchEntityDao.findByUuid(bundleProvider.getUuid());
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(bundleProvider.getUuid()).orElseThrow(()->new InternalErrorException("Search doesn't exist"));
assertNotNull(search);
}
});
@@ -386,7 +392,7 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
newTxTemplate().execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
Search search = mySearchEntityDao.findByUuid(bundleProvider.getUuid());
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(bundleProvider.getUuid()).orElse(null);
assertNull(search);
}
});
@@ -405,7 +411,7 @@ public class FhirResourceDaoR4SearchPageExpiryTest extends BaseJpaR4Test {
protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
Search search = null;
for (int i = 0; i < 20 && search == null; i++) {
search = theSearchEntityDao.findByUuid(theUuid);
search = theSearchEntityDao.findByUuidAndFetchIncludes(theUuid).orElse(null);
if (search == null || search.getStatus() == SearchStatusEnum.LOADING) {
TestUtil.sleepAtLeast(100);
}
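Note: the page-expiry tests above avoid real sleeps by moving the service's notion of "now" and then triggering the sweep by hand. Condensed, the pattern they rely on looks roughly like this (only hooks that appear in the diff are used; the millisecond offsets are illustrative):

    long start = System.currentTimeMillis();
    DatabaseSearchCacheSvcImpl.setNowForUnitTests(start);          // pin the clock
    // ... run a search and remember its UUID ...
    DatabaseSearchCacheSvcImpl.setNowForUnitTests(start + 1100);   // jump past the expiry window
    myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();  // sweep runs against the pinned clock
    DatabaseSearchCacheSvcImpl.setNowForUnitTests(null);           // restore the real clock in @After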
@@ -3860,7 +3860,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
search.setStatus(SearchStatusEnum.FAILED);
search.setFailureCode(500);
search.setFailureMessage("FOO");
mySearchEntityDao.save(search);
mySearchCacheSvc.save(search);
});
IBundleProvider results = myEncounterDao.search(map);
@@ -249,12 +249,6 @@ public abstract class BaseJpaR5Test extends BaseJpaTest {
@Autowired(required = false)
protected IFulltextSearchSvc mySearchDao;
@Autowired
protected ISearchDao mySearchEntityDao;
@Autowired
protected ISearchResultDao mySearchResultDao;
@Autowired
protected ISearchIncludeDao mySearchIncludeDao;
@Autowired
protected IResourceReindexJobDao myResourceReindexJobDao;
@Autowired
@Qualifier("mySearchParameterDaoR5")
@@ -13,6 +13,7 @@ import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.test.utilities.JettyUtil;
import ca.uhn.fhir.util.TestUtil;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
@@ -36,8 +37,6 @@ import java.util.concurrent.TimeUnit;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import ca.uhn.fhir.test.utilities.JettyUtil;
public abstract class BaseResourceProviderDstu2Test extends BaseJpaDstu2Test {
protected static IGenericClient ourClient;
@@ -61,7 +60,7 @@ public abstract class BaseResourceProviderDstu2Test extends BaseJpaDstu2Test {
myFhirCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.ONCE);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@SuppressWarnings({"unchecked", "rawtypes"})
@Before
public void before() throws Exception {
myFhirCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER);
@@ -82,7 +81,7 @@ public abstract class BaseResourceProviderDstu2Test extends BaseJpaDstu2Test {
ourConnectionPoolSize = myAppCtx.getBean("maxDatabaseThreadsForTest", Integer.class);
ourRestServer.setPagingProvider(ourPagingProvider);
Server server = new Server(ourPort);
Server server = new Server(0);
ServletContextHandler proxyHandler = new ServletContextHandler();
proxyHandler.setContextPath("/");
@@ -103,9 +102,7 @@ public abstract class BaseResourceProviderDstu2Test extends BaseJpaDstu2Test {
dispatcherServlet.setContextClass(AnnotationConfigWebApplicationContext.class);
ServletHolder subsServletHolder = new ServletHolder();
subsServletHolder.setServlet(dispatcherServlet);
subsServletHolder.setInitParameter(
ContextLoader.CONFIG_LOCATION_PARAM,
WebsocketDispatcherConfig.class.getName());
subsServletHolder.setInitParameter(ContextLoader.CONFIG_LOCATION_PARAM, WebsocketDispatcherConfig.class.getName());
proxyHandler.addServlet(subsServletHolder, "/*");
@@ -44,9 +44,11 @@ import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
public class GraphQLR4ProviderTest {
public class JpaGraphQLR4ProviderTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(GraphQLR4ProviderTest.class);
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(JpaGraphQLR4ProviderTest.class);
public static final String DATA_PREFIX = "{\"data\": ";
public static final String DATA_SUFFIX = "}";
private static CloseableHttpClient ourClient;
private static FhirContext ourCtx = FhirContext.forR4();
private static int ourPort;
@@ -67,14 +69,14 @@ public class GraphQLR4ProviderTest {
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(TestUtil.stripReturns("{\n" +
assertEquals(TestUtil.stripWhitespace(DATA_PREFIX + "{\n" +
" \"name\":[{\n" +
" \"family\":\"FAMILY\",\n" +
" \"given\":[\"GIVEN1\",\"GIVEN2\"]\n" +
" },{\n" +
" \"given\":[\"GivenOnly1\",\"GivenOnly2\"]\n" +
" }]\n" +
"}"), TestUtil.stripReturns(responseContent));
"}" + DATA_SUFFIX), TestUtil.stripWhitespace(responseContent));
assertThat(status.getFirstHeader(Constants.HEADER_CONTENT_TYPE).getValue(), startsWith("application/json"));
} finally {
@@ -93,12 +95,12 @@ public class GraphQLR4ProviderTest {
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(TestUtil.stripReturns("{\n" +
assertEquals(TestUtil.stripWhitespace(DATA_PREFIX + "{\n" +
" \"name\":[{\n" +
" \"given\":[\"GIVEN1\",\"GIVEN2\"],\n" +
" \"family\":\"FAMILY\"\n" +
" }]\n" +
"}"), TestUtil.stripReturns(responseContent));
"}" + DATA_SUFFIX), TestUtil.stripWhitespace(responseContent));
assertThat(status.getFirstHeader(Constants.HEADER_CONTENT_TYPE).getValue(), startsWith("application/json"));
} finally {
@@ -117,7 +119,7 @@ public class GraphQLR4ProviderTest {
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(TestUtil.stripReturns("{\n" +
assertEquals(TestUtil.stripWhitespace(DATA_PREFIX + "{\n" +
" \"Patient\":{\n" +
" \"name\":[{\n" +
" \"given\":[\"GIVEN1\",\"GIVEN2\"],\n" +
@@ -126,7 +128,7 @@ public class GraphQLR4ProviderTest {
" \"given\":[\"GivenOnly1\",\"GivenOnly2\"]\n" +
" }]\n" +
" }\n" +
"}"), TestUtil.stripReturns(responseContent));
"}" + DATA_SUFFIX), TestUtil.stripWhitespace(responseContent));
assertThat(status.getFirstHeader(Constants.HEADER_CONTENT_TYPE).getValue(), startsWith("application/json"));
} finally {
@@ -145,7 +147,7 @@ public class GraphQLR4ProviderTest {
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals(TestUtil.stripReturns("{\n" +
assertEquals(TestUtil.stripWhitespace(DATA_PREFIX + "{\n" +
" \"PatientList\":[{\n" +
" \"name\":[{\n" +
" \"family\":\"pet\",\n" +
@@ -158,7 +160,7 @@ public class GraphQLR4ProviderTest {
" \"given\":[\"GivenOnlyB1\",\"GivenOnlyB2\"]\n" +
" }]\n" +
" }]\n" +
"}"), TestUtil.stripReturns(responseContent));
"}" + DATA_SUFFIX), TestUtil.stripWhitespace(responseContent));
assertThat(status.getFirstHeader(Constants.HEADER_CONTENT_TYPE).getValue(), startsWith("application/json"));
} finally {
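Note: the GraphQL assertions now wrap the expected JSON in the new DATA_PREFIX/DATA_SUFFIX constants and compare with TestUtil.stripWhitespace rather than stripReturns, so the check ignores spaces as well as line endings. Roughly, with the expected payload shortened purely for illustration:

    String expected = DATA_PREFIX + "{\"name\":[{\"family\":\"FAMILY\"}]}" + DATA_SUFFIX;
    assertEquals(TestUtil.stripWhitespace(expected), TestUtil.stripWhitespace(responseContent));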
@@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.provider.dstu3;
import ca.uhn.fhir.jpa.config.WebsocketDispatcherConfig;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.dstu3.BaseJpaDstu3Test;
import ca.uhn.fhir.jpa.provider.GraphQLProvider;
import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
@@ -18,6 +17,7 @@ import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.CorsInterceptor;
import ca.uhn.fhir.test.utilities.JettyUtil;
import ca.uhn.fhir.util.TestUtil;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
@@ -48,8 +48,6 @@ import java.util.concurrent.TimeUnit;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import ca.uhn.fhir.test.utilities.JettyUtil;
public abstract class BaseResourceProviderDstu3Test extends BaseJpaDstu3Test {
@@ -62,7 +60,6 @@ public abstract class BaseResourceProviderDstu3Test extends BaseJpaDstu3Test {
protected static GenericWebApplicationContext ourWebApplicationContext;
protected static SearchParamRegistryDstu3 ourSearchParamRegistry;
protected static DatabaseBackedPagingProvider ourPagingProvider;
protected static ISearchDao mySearchEntityDao;
protected static ISearchCoordinatorSvc ourSearchCoordinatorSvc;
private static Server ourServer;
protected static SubscriptionTriggeringProvider ourSubscriptionTriggeringProvider;
@@ -156,7 +153,6 @@ public abstract class BaseResourceProviderDstu3Test extends BaseJpaDstu3Test {
WebApplicationContext wac = WebApplicationContextUtils.getWebApplicationContext(subsServletHolder.getServlet().getServletConfig().getServletContext());
myValidationSupport = wac.getBean(JpaValidationSupportChainDstu3.class);
ourSearchCoordinatorSvc = wac.getBean(ISearchCoordinatorSvc.class);
mySearchEntityDao = wac.getBean(ISearchDao.class);
ourSearchParamRegistry = wac.getBean(SearchParamRegistryDstu3.class);
ourSubscriptionTriggeringProvider = wac.getBean(SubscriptionTriggeringProvider.class);
@@ -5,8 +5,8 @@ import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -14,6 +14,8 @@ import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import static ca.uhn.fhir.jpa.provider.JpaGraphQLR4ProviderTest.DATA_PREFIX;
import static ca.uhn.fhir.jpa.provider.JpaGraphQLR4ProviderTest.DATA_SUFFIX;
import static org.junit.Assert.assertEquals;
public class GraphQLProviderDstu3Test extends BaseResourceProviderDstu3Test {
@@ -30,14 +32,14 @@ public class GraphQLProviderDstu3Test extends BaseResourceProviderDstu3Test {
try (CloseableHttpResponse response = ourHttpClient.execute(httpGet)) {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp);
assertEquals(TestUtil.stripReturns("{\n" +
assertEquals(TestUtil.stripWhitespace(DATA_PREFIX +"{\n" +
" \"name\":[{\n" +
" \"family\":\"FAM\",\n" +
" \"given\":[\"GIVEN1\",\"GIVEN2\"]\n" +
" },{\n" +
" \"given\":[\"GivenOnly1\",\"GivenOnly2\"]\n" +
" }]\n" +
"}"), TestUtil.stripReturns(resp));
"}" + DATA_SUFFIX), TestUtil.stripWhitespace(resp));
}
}
@@ -52,7 +54,7 @@ public class GraphQLProviderDstu3Test extends BaseResourceProviderDstu3Test {
try (CloseableHttpResponse response = ourHttpClient.execute(httpGet)) {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp);
assertEquals(TestUtil.stripReturns("{\n" +
assertEquals(TestUtil.stripWhitespace(DATA_PREFIX +"{\n" +
" \"PatientList\":[{\n" +
" \"name\":[{\n" +
" \"family\":\"FAM\",\n" +
@@ -65,7 +67,7 @@ public class GraphQLProviderDstu3Test extends BaseResourceProviderDstu3Test {
" \"given\":[\"GivenOnlyB1\",\"GivenOnlyB2\"]\n" +
" }]\n" +
" }]\n" +
"}"), TestUtil.stripReturns(resp));
"}" + DATA_SUFFIX), TestUtil.stripWhitespace(resp));
}
}
@@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.provider.dstu3;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.provider.r4.ResourceProviderR4Test;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
@@ -18,10 +19,7 @@ import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.rest.gclient.StringClientParam;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.util.TestUtil;
import ca.uhn.fhir.util.UrlUtil;
@@ -51,7 +49,11 @@ import org.hl7.fhir.dstu3.model.Subscription.SubscriptionStatus;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.util.AopTestUtils;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
@@ -74,6 +76,8 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderDstu3Test.class);
private SearchCoordinatorSvcImpl mySearchCoordinatorSvcRaw;
@Autowired
private ISearchDao mySearchEntityDao;
@Override
@After
@@ -2969,12 +2973,13 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
.count(5)
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid1 = toSearchUuidFromLinkNext(result1);
Search search1 = newTxTemplate().execute(new TransactionCallback<Search>() {
@Override
public Search doInTransaction(TransactionStatus theStatus) {
return mySearchEntityDao.findByUuid(uuid1);
return mySearchEntityDao.findByUuidAndFetchIncludes(uuid1).orElseThrow(() -> new InternalErrorException(""));
}
});
Date lastReturned1 = search1.getSearchLastReturned();
@@ -2986,12 +2991,13 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
.count(5)
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid2 = toSearchUuidFromLinkNext(result2);
Search search2 = newTxTemplate().execute(new TransactionCallback<Search>() {
@Override
public Search doInTransaction(TransactionStatus theStatus) {
return mySearchEntityDao.findByUuid(uuid2);
return mySearchEntityDao.findByUuidAndFetchIncludes(uuid2).orElseThrow(() -> new InternalErrorException(""));
}
});
Date lastReturned2 = search2.getSearchLastReturned();
@@ -3031,14 +3037,10 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
.forResource("Organization")
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid1 = toSearchUuidFromLinkNext(result1);
Search search1 = newTxTemplate().execute(new TransactionCallback<Search>() {
@Override
public Search doInTransaction(TransactionStatus theStatus) {
return mySearchEntityDao.findByUuid(uuid1);
}
});
Search search1 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid1).orElseThrow(() -> new InternalErrorException("")));
Date lastReturned1 = search1.getSearchLastReturned();
Bundle result2 = ourClient
@@ -3046,14 +3048,10 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
.forResource("Organization")
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid2 = toSearchUuidFromLinkNext(result2);
Search search2 = newTxTemplate().execute(new TransactionCallback<Search>() {
@Override
public Search doInTransaction(TransactionStatus theStatus) {
return mySearchEntityDao.findByUuid(uuid2);
}
});
Search search2 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid2).orElseThrow(() -> new InternalErrorException("")));
Date lastReturned2 = search2.getSearchLastReturned();
assertTrue(lastReturned2.getTime() > lastReturned1.getTime());
@@ -1,97 +0,0 @@
package ca.uhn.fhir.jpa.provider.dstu3;
import static org.hamcrest.Matchers.blankOrNullString;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Bundle.BundleLinkComponent;
import org.hl7.fhir.dstu3.model.Patient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.springframework.test.util.AopTestUtils;
import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
import ca.uhn.fhir.rest.gclient.IClientExecutable;
import ca.uhn.fhir.rest.gclient.IQuery;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.util.TestUtil;
public class StaleSearchDeletingSvcDstu3Test extends BaseResourceProviderDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StaleSearchDeletingSvcDstu3Test.class);
@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}
@After()
public void after() throws Exception {
super.after();
StaleSearchDeletingSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(myStaleSearchDeletingSvc);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(StaleSearchDeletingSvcImpl.DEFAULT_CUTOFF_SLACK);
}
@Before
public void before() throws Exception {
super.before();
StaleSearchDeletingSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(myStaleSearchDeletingSvc);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(0);
}
@Test
public void testEverythingInstanceWithContentFilter() throws Exception {
for (int i = 0; i < 20; i++) {
Patient pt1 = new Patient();
pt1.addName().setFamily("Everything").addGiven("Arthur");
myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();
}
//@formatter:off
IClientExecutable<IQuery<Bundle>, Bundle> search = ourClient
.search()
.forResource(Patient.class)
.where(Patient.NAME.matches().value("Everything"))
.returnBundle(Bundle.class);
//@formatter:on
Bundle resp1 = search.execute();
for (int i = 0; i < 20; i++) {
search.execute();
}
BundleLinkComponent nextLink = resp1.getLink("next");
assertNotNull(nextLink);
String nextLinkUrl = nextLink.getUrl();
assertThat(nextLinkUrl, not(blankOrNullString()));
Bundle resp2 = ourClient.search().byUrl(nextLinkUrl).returnBundle(Bundle.class).execute();
ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(resp2));
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
ourClient.search().byUrl(nextLinkUrl).returnBundle(Bundle.class).execute();
Thread.sleep(20);
myDaoConfig.setExpireSearchResultsAfterMillis(10);
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
try {
ourClient.search().byUrl(nextLinkUrl).returnBundle(Bundle.class).execute();
fail();
} catch (ResourceGoneException e) {
assertThat(e.getMessage(), containsString("does not exist and may have expired"));
}
}
}
@@ -2,7 +2,6 @@ package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.config.WebsocketDispatcherConfig;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.provider.GraphQLProvider;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
@@ -2,6 +2,8 @@ package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.search.PersistedJpaSearchFirstPageBundleProvider;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
@@ -21,6 +23,7 @@ import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
@@ -38,6 +41,10 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
private IIdType myOneVersionObservationId;
private IIdType myTwoVersionObservationId;
private IIdType myDeletedObservationId;
@Autowired
private ISearchDao mySearchEntityDao;
@Autowired
private ISearchResultDao mySearchResultDao;
@After
public void afterDisableExpunge() {
@@ -14,6 +14,8 @@ import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import static ca.uhn.fhir.jpa.provider.JpaGraphQLR4ProviderTest.DATA_PREFIX;
import static ca.uhn.fhir.jpa.provider.JpaGraphQLR4ProviderTest.DATA_SUFFIX;
import static org.junit.Assert.assertEquals;
public class GraphQLProviderR4Test extends BaseResourceProviderR4Test {
@@ -30,14 +32,14 @@ public class GraphQLProviderR4Test extends BaseResourceProviderR4Test {
try (CloseableHttpResponse response = ourHttpClient.execute(httpGet)) {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp);
assertEquals(TestUtil.stripReturns("{\n" +
assertEquals(TestUtil.stripWhitespace(DATA_PREFIX + "{\n" +
" \"name\":[{\n" +
" \"family\":\"FAM\",\n" +
" \"given\":[\"GIVEN1\",\"GIVEN2\"]\n" +
" },{\n" +
" \"given\":[\"GivenOnly1\",\"GivenOnly2\"]\n" +
" }]\n" +
"}"), TestUtil.stripReturns(resp));
"}" + DATA_SUFFIX), TestUtil.stripWhitespace(resp));
}
}
@@ -52,7 +54,7 @@ public class GraphQLProviderR4Test extends BaseResourceProviderR4Test {
try (CloseableHttpResponse response = ourHttpClient.execute(httpGet)) {
String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(resp);
assertEquals(TestUtil.stripReturns("{\n" +
assertEquals(TestUtil.stripWhitespace(DATA_PREFIX + "{\n" +
" \"PatientList\":[{\n" +
" \"name\":[{\n" +
" \"family\":\"FAM\",\n" +
@@ -65,7 +67,7 @@ public class GraphQLProviderR4Test extends BaseResourceProviderR4Test {
" \"given\":[\"GivenOnlyB1\",\"GivenOnlyB2\"]\n" +
" }]\n" +
" }]\n" +
"}"), TestUtil.stripReturns(resp));
"}" + DATA_SUFFIX), TestUtil.stripWhitespace(resp));
}
}
@@ -1,7 +1,7 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.CacheControlDirective;
@@ -13,9 +13,8 @@ import org.hl7.fhir.r4.model.Patient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Test;
import org.springframework.test.util.AopTestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.util.Date;
import static org.hamcrest.Matchers.*;
@@ -24,9 +23,9 @@ import static org.junit.Assert.*;
public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderR4CacheTest.class);
private SearchCoordinatorSvcImpl mySearchCoordinatorSvcRaw;
private CapturingInterceptor myCapturingInterceptor;
@Autowired
private ISearchDao mySearchEntityDao;
@Override
@After
@@ -42,14 +41,13 @@ public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test {
public void before() throws Exception {
super.before();
myFhirCtx.setParserErrorHandler(new StrictErrorHandler());
mySearchCoordinatorSvcRaw = AopTestUtils.getTargetObject(mySearchCoordinatorSvc);
myCapturingInterceptor = new CapturingInterceptor();
ourClient.registerInterceptor(myCapturingInterceptor);
}
@Test
public void testCacheNoStore() throws IOException {
public void testCacheNoStore() {
Patient pt1 = new Patient();
pt1.addName().setFamily("FAM");
@@ -84,7 +82,7 @@ public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test {
}
@Test
public void testCacheNoStoreMaxResults() throws IOException {
public void testCacheNoStoreMaxResults() {
for (int i = 0; i < 10; i++) {
Patient pt1 = new Patient();
@@ -106,7 +104,7 @@ public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test {
}
@Test
public void testCacheNoStoreMaxResultsWithIllegalValue() throws IOException {
public void testCacheNoStoreMaxResultsWithIllegalValue() {
myDaoConfig.setCacheControlNoStoreMaxResultsUpperLimit(123);
try {
ourClient
@@ -123,7 +121,7 @@ public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test {
}
@Test
public void testCacheSuppressed() throws IOException {
public void testCacheSuppressed() {
Patient pt1 = new Patient();
pt1.addName().setFamily("FAM");
@@ -152,7 +150,7 @@ public class ResourceProviderR4CacheTest extends BaseResourceProviderR4Test {
}
@Test
public void testCacheUsedNormally() throws IOException {
public void testCacheUsedNormally() {
Patient pt1 = new Patient();
pt1.addName().setFamily("FAM");
@@ -2,10 +2,11 @@ package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.config.TestR4Config;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.primitive.InstantDt;
@@ -21,10 +22,7 @@ import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
import ca.uhn.fhir.rest.gclient.StringClientParam;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
@@ -54,6 +52,7 @@ import org.hl7.fhir.r4.model.Questionnaire.QuestionnaireItemType;
import org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType;
import org.hl7.fhir.r4.model.Subscription.SubscriptionStatus;
import org.junit.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.util.AopTestUtils;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
@@ -82,6 +81,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
public static final int LARGE_NUMBER = 77;
private SearchCoordinatorSvcImpl mySearchCoordinatorSvcRaw;
private CapturingInterceptor myCapturingInterceptor = new CapturingInterceptor();
@Autowired
private ISearchDao mySearchEntityDao;
@Override
@After
@@ -3937,9 +3938,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.count(5)
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid1 = toSearchUuidFromLinkNext(result1);
Search search1 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuid(uuid1));
Search search1 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid1).orElseThrow(()->new InternalErrorException("")));
Date lastReturned1 = search1.getSearchLastReturned();
Bundle result2 = ourClient
@@ -3949,9 +3951,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.count(5)
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid2 = toSearchUuidFromLinkNext(result2);
Search search2 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuid(uuid2));
Search search2 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid2).orElseThrow(()->new InternalErrorException("")));
Date lastReturned2 = search2.getSearchLastReturned();
assertTrue(lastReturned2.getTime() > lastReturned1.getTime());
@@ -3965,6 +3968,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.count(5)
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
String uuid3 = toSearchUuidFromLinkNext(result3);
@@ -3989,9 +3993,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.forResource("Organization")
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid1 = toSearchUuidFromLinkNext(result1);
Search search1 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuid(uuid1));
Search search1 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid1).orElseThrow(()->new InternalErrorException("")));
Date lastReturned1 = search1.getSearchLastReturned();
sleepOneClick();
@@ -4001,9 +4006,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.forResource("Organization")
.returnBundle(Bundle.class)
.execute();
mySearchCacheSvc.flushLastUpdated();
final String uuid2 = toSearchUuidFromLinkNext(result2);
Search search2 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuid(uuid2));
Search search2 = newTxTemplate().execute(theStatus -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid2).orElseThrow(()->new InternalErrorException("")));
Date lastReturned2 = search2.getSearchLastReturned();
assertTrue(lastReturned2.getTime() > lastReturned1.getTime());
@@ -1,9 +1,15 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
import ca.uhn.fhir.rest.gclient.IClientExecutable;
import ca.uhn.fhir.rest.gclient.IQuery;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
@@ -16,6 +22,7 @@ import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.util.AopTestUtils;
import java.util.Date;
@@ -27,22 +34,28 @@ import static org.junit.Assert.*;
public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StaleSearchDeletingSvcR4Test.class);
@Autowired
private ISearchDao mySearchEntityDao;
@Autowired
private ISearchResultDao mySearchResultDao;
@Autowired
private ISearchIncludeDao mySearchIncludeDao;
@Override
@After()
public void after() throws Exception {
super.after();
StaleSearchDeletingSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(myStaleSearchDeletingSvc);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(StaleSearchDeletingSvcImpl.DEFAULT_CUTOFF_SLACK);
StaleSearchDeletingSvcImpl.setMaximumResultsToDeleteForUnitTest(StaleSearchDeletingSvcImpl.DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT);
StaleSearchDeletingSvcImpl.setMaximumResultsToDeleteInOnePassForUnitTest(StaleSearchDeletingSvcImpl.DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS);
DatabaseSearchCacheSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(mySearchCacheSvc);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(DatabaseSearchCacheSvcImpl.DEFAULT_CUTOFF_SLACK);
DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteForUnitTest(DatabaseSearchCacheSvcImpl.DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_STMT);
DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteInOnePassForUnitTest(DatabaseSearchCacheSvcImpl.DEFAULT_MAX_RESULTS_TO_DELETE_IN_ONE_PAS);
}
@Override
@Before
public void before() throws Exception {
super.before();
StaleSearchDeletingSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(myStaleSearchDeletingSvc);
DatabaseSearchCacheSvcImpl staleSearchDeletingSvc = AopTestUtils.getTargetObject(mySearchCacheSvc);
staleSearchDeletingSvc.setCutoffSlackForUnitTest(0);
}
@@ -96,8 +109,8 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
@Test
public void testDeleteVeryLargeSearch() {
StaleSearchDeletingSvcImpl.setMaximumResultsToDeleteForUnitTest(10);
StaleSearchDeletingSvcImpl.setMaximumResultsToDeleteInOnePassForUnitTest(10);
DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteForUnitTest(10);
DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteInOnePassForUnitTest(10);
runInTransaction(() -> {
Search search = new Search();
@@ -139,7 +152,7 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
@Test
public void testDeleteVerySmallSearch() {
StaleSearchDeletingSvcImpl.setMaximumResultsToDeleteForUnitTest(10);
DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteForUnitTest(10);
runInTransaction(() -> {
Search search = new Search();
@@ -149,8 +162,7 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
search.setSearchType(SearchTypeEnum.SEARCH);
search.setResourceType("Patient");
search.setSearchLastReturned(DateUtils.addDays(new Date(), -10000));
search = mySearchEntityDao.save(search);
mySearchEntityDao.save(search);
});
// It should take one pass to delete the search fully
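Note: StaleSearchDeletingSvcR4Test now drives the deletion batch limits through DatabaseSearchCacheSvcImpl instead of the old service. The intent of testDeleteVeryLargeSearch, sketched with only the hooks visible above (the limit of 10 comes from the test itself; the defaults are restored in @After):

    // Shrink the per-statement and per-pass limits so a large search exercises the multi-pass deletion path.
    DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteForUnitTest(10);
    DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteInOnePassForUnitTest(10);
    myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();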
@@ -2,7 +2,6 @@ package ca.uhn.fhir.jpa.provider.r5;
import ca.uhn.fhir.jpa.config.WebsocketDispatcherConfig;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.r5.BaseJpaR5Test;
import ca.uhn.fhir.jpa.provider.GraphQLProvider;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
@@ -64,7 +63,6 @@ public abstract class BaseResourceProviderR5Test extends BaseJpaR5Test {
protected static String ourServerBase;
protected static SearchParamRegistryR5 ourSearchParamRegistry;
private static DatabaseBackedPagingProvider ourPagingProvider;
protected static ISearchDao mySearchEntityDao;
protected static ISearchCoordinatorSvc mySearchCoordinatorSvc;
private static GenericWebApplicationContext ourWebApplicationContext;
private static SubscriptionMatcherInterceptor ourSubscriptionMatcherInterceptor;
@@ -166,7 +164,6 @@ public abstract class BaseResourceProviderR5Test extends BaseJpaR5Test {
WebApplicationContext wac = WebApplicationContextUtils.getWebApplicationContext(subsServletHolder.getServlet().getServletConfig().getServletContext());
myValidationSupport = wac.getBean(JpaValidationSupportChainR5.class);
mySearchCoordinatorSvc = wac.getBean(ISearchCoordinatorSvc.class);
mySearchEntityDao = wac.getBean(ISearchDao.class);
ourSearchParamRegistry = wac.getBean(SearchParamRegistryR5.class);
ourSubscriptionMatcherInterceptor = wac.getBean(SubscriptionMatcherInterceptor.class);
ourSubscriptionMatcherInterceptor.start();
@@ -3,13 +3,11 @@ package ca.uhn.fhir.jpa.search;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.dao.*;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.model.dstu2.resource.Patient;
@@ -19,7 +17,6 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.collect.Lists;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.After;
import org.junit.AfterClass;
@@ -34,7 +31,6 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
@@ -53,20 +49,18 @@ public class SearchCoordinatorSvcImplTest {
private static final Logger ourLog = LoggerFactory.getLogger(SearchCoordinatorSvcImplTest.class);
private static FhirContext ourCtx = FhirContext.forDstu3();
@Captor
ArgumentCaptor<Iterable<SearchResult>> mySearchResultIterCaptor;
ArgumentCaptor<List<Long>> mySearchResultIterCaptor;
@Mock
private IFhirResourceDao<?> myCallingDao;
@Mock
private EntityManager myEntityManager;
private int myExpectedNumberOfSearchBuildersCreated = 2;
@Mock
private ISearchBuilder mySearchBuider;
private ISearchBuilder mySearchBuilder;
@Mock
private ISearchDao mySearchDao;
private ISearchCacheSvc mySearchCacheSvc;
@Mock
private ISearchIncludeDao mySearchIncludeDao;
@Mock
private ISearchResultDao mySearchResultDao;
private ISearchResultCacheSvc mySearchResultCacheSvc;
private SearchCoordinatorSvcImpl mySvc;
@Mock
private PlatformTransactionManager myTxManager;
@@ -90,16 +84,14 @@ public class SearchCoordinatorSvcImplTest {
mySvc.setEntityManagerForUnitTest(myEntityManager);
mySvc.setTransactionManagerForUnitTest(myTxManager);
mySvc.setContextForUnitTest(ourCtx);
mySvc.setSearchDaoForUnitTest(mySearchDao);
mySvc.setSearchDaoIncludeForUnitTest(mySearchIncludeDao);
mySvc.setSearchDaoResultForUnitTest(mySearchResultDao);
mySvc.setSearchCacheServicesForUnitTest(mySearchCacheSvc, mySearchResultCacheSvc);
mySvc.setDaoRegistryForUnitTest(myDaoRegistry);
mySvc.setInterceptorBroadcasterForUnitTest(myInterceptorBroadcaster);
myDaoConfig = new DaoConfig();
mySvc.setDaoConfigForUnitTest(myDaoConfig);
when(myCallingDao.newSearchBuilder()).thenReturn(mySearchBuider);
when(myCallingDao.newSearchBuilder()).thenReturn(mySearchBuilder);
when(myTxManager.getTransaction(any())).thenReturn(mock(TransactionStatus.class));
@@ -107,7 +99,7 @@ public class SearchCoordinatorSvcImplTest {
PersistedJpaBundleProvider provider = (PersistedJpaBundleProvider) theInvocation.getArguments()[0];
provider.setSearchCoordinatorSvc(mySvc);
provider.setPlatformTransactionManager(myTxManager);
provider.setSearchDao(mySearchDao);
provider.setSearchCacheSvc(mySearchCacheSvc);
provider.setEntityManager(myEntityManager);
provider.setContext(ourCtx);
provider.setInterceptorBroadcaster(myInterceptorBroadcaster);
@@ -143,7 +135,7 @@ public class SearchCoordinatorSvcImplTest {
List<Long> pids = createPidSequence(10, 800);
IResultIterator iter = new FailAfterNIterator(new SlowIterator(pids.iterator(), 2), 300);
when(mySearchBuider.createQuery(Mockito.same(params), any(), any())).thenReturn(iter);
when(mySearchBuilder.createQuery(Mockito.same(params), any(), any())).thenReturn(iter);
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null);
assertNotNull(result.getUuid());
@@ -159,23 +151,42 @@ public class SearchCoordinatorSvcImplTest {
@Test
public void testAsyncSearchLargeResultSetBigCountSameCoordinator() {
List<Long> allResults = new ArrayList<>();
doAnswer(t->{
List<Long> oldResults = t.getArgument(1, List.class);
List<Long> newResults = t.getArgument(2, List.class);
ourLog.info("Saving {} new results - have {} old results", newResults.size(), oldResults.size());
assertEquals(allResults.size(), oldResults.size());
allResults.addAll(newResults);
return null;
}).when(mySearchResultCacheSvc).storeResults(any(),anyList(),anyList());
SearchParameterMap params = new SearchParameterMap();
params.add("name", new StringParam("ANAME"));
List<Long> pids = createPidSequence(10, 800);
SlowIterator iter = new SlowIterator(pids.iterator(), 1);
when(mySearchBuider.createQuery(any(), any(), any())).thenReturn(iter);
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
when(mySearchBuilder.createQuery(any(), any(), any())).thenReturn(iter);
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
when(mySearchResultDao.findWithSearchUuid(any(), any())).thenAnswer(t -> {
when(mySearchResultCacheSvc.fetchResultPids(any(), anyInt(), anyInt())).thenAnswer(t -> {
List<Long> returnedValues = iter.getReturnedValues();
Pageable page = (Pageable) t.getArguments()[1];
int offset = (int) page.getOffset();
int end = (int) (page.getOffset() + page.getPageSize());
int offset = t.getArgument(1, Integer.class);
int end = t.getArgument(2, Integer.class);
end = Math.min(end, returnedValues.size());
offset = Math.min(offset, returnedValues.size());
ourLog.info("findWithSearchUuid {} - {} out of {} values", offset, end, returnedValues.size());
|
||||
return new PageImpl<>(returnedValues.subList(offset, end));
|
||||
return returnedValues.subList(offset, end);
|
||||
});
|
||||
|
||||
when(mySearchResultCacheSvc.fetchAllResultPids(any())).thenReturn(allResults);
|
||||
|
||||
when(mySearchCacheSvc.tryToMarkSearchAsInProgress(any())).thenAnswer(t->{
|
||||
Search search = t.getArgument(0, Search.class);
|
||||
assertEquals(SearchStatusEnum.PASSCMPLET, search.getStatus());
|
||||
search.setStatus(SearchStatusEnum.LOADING);
|
||||
return Optional.of(search);
|
||||
});
|
||||
|
||||
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null);
|
||||
|
@ -184,12 +195,14 @@ public class SearchCoordinatorSvcImplTest {
|
|||
|
||||
List<IBaseResource> resources;
|
||||
|
||||
when(mySearchDao.save(any())).thenAnswer(t -> {
|
||||
when(mySearchCacheSvc.save(any())).thenAnswer(t -> {
|
||||
Search search = (Search) t.getArguments()[0];
|
||||
myCurrentSearch = search;
|
||||
return search;
|
||||
});
|
||||
when(mySearchDao.findByUuid(any())).thenAnswer(t -> myCurrentSearch);
|
||||
when(mySearchCacheSvc.fetchByUuid(any())).thenAnswer(t -> {
|
||||
return Optional.ofNullable(myCurrentSearch);
|
||||
});
|
||||
IFhirResourceDao dao = myCallingDao;
|
||||
when(myDaoRegistry.getResourceDao(any(String.class))).thenReturn(dao);
|
||||
|
||||
|
@ -199,17 +212,11 @@ public class SearchCoordinatorSvcImplTest {
|
|||
assertEquals("799", resources.get(789).getIdElement().getValueAsString());
|
||||
|
||||
ArgumentCaptor<Search> searchCaptor = ArgumentCaptor.forClass(Search.class);
|
||||
verify(mySearchDao, atLeastOnce()).save(searchCaptor.capture());
|
||||
|
||||
verify(mySearchResultDao, atLeastOnce()).saveAll(mySearchResultIterCaptor.capture());
|
||||
List<SearchResult> allResults = new ArrayList<>();
|
||||
for (Iterable<SearchResult> next : mySearchResultIterCaptor.getAllValues()) {
|
||||
allResults.addAll(Lists.newArrayList(next));
|
||||
}
|
||||
verify(mySearchCacheSvc, atLeastOnce()).save(searchCaptor.capture());
|
||||
|
||||
assertEquals(790, allResults.size());
|
||||
assertEquals(10, allResults.get(0).getResourcePid().longValue());
|
||||
assertEquals(799, allResults.get(789).getResourcePid().longValue());
|
||||
assertEquals(10, allResults.get(0).longValue());
|
||||
assertEquals(799, allResults.get(789).longValue());
|
||||
|
||||
myExpectedNumberOfSearchBuildersCreated = 4;
|
||||
}
|
||||
|
@ -221,9 +228,9 @@ public class SearchCoordinatorSvcImplTest {
|
|||
|
||||
List<Long> pids = createPidSequence(10, 800);
|
||||
SlowIterator iter = new SlowIterator(pids.iterator(), 2);
|
||||
when(mySearchBuider.createQuery(same(params), any(), any())).thenReturn(iter);
|
||||
when(mySearchBuilder.createQuery(same(params), any(), any())).thenReturn(iter);
|
||||
|
||||
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
|
||||
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
|
||||
|
||||
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null);
|
||||
assertNotNull(result.getUuid());
|
||||
|
@ -249,16 +256,16 @@ public class SearchCoordinatorSvcImplTest {
|
|||
|
||||
List<Long> pids = createPidSequence(10, 800);
|
||||
IResultIterator iter = new SlowIterator(pids.iterator(), 2);
|
||||
when(mySearchBuider.createQuery(same(params), any(), any())).thenReturn(iter);
|
||||
when(mySearchDao.save(any())).thenAnswer(t -> t.getArguments()[0]);
|
||||
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
|
||||
when(mySearchBuilder.createQuery(same(params), any(), any())).thenReturn(iter);
|
||||
when(mySearchCacheSvc.save(any())).thenAnswer(t -> t.getArguments()[0]);
|
||||
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
|
||||
|
||||
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null);
|
||||
assertNotNull(result.getUuid());
|
||||
assertEquals(null, result.size());
|
||||
|
||||
ArgumentCaptor<Search> searchCaptor = ArgumentCaptor.forClass(Search.class);
|
||||
verify(mySearchDao, atLeast(1)).save(searchCaptor.capture());
|
||||
verify(mySearchCacheSvc, atLeast(1)).save(searchCaptor.capture());
|
||||
Search search = searchCaptor.getValue();
|
||||
assertEquals(SearchTypeEnum.SEARCH, search.getSearchType());
|
||||
|
||||
|
@ -271,7 +278,7 @@ public class SearchCoordinatorSvcImplTest {
|
|||
assertEquals("10", resources.get(0).getIdElement().getValueAsString());
|
||||
assertEquals("19", resources.get(9).getIdElement().getValueAsString());
|
||||
|
||||
when(mySearchDao.findByUuid(eq(result.getUuid()))).thenReturn(search);
|
||||
when(mySearchCacheSvc.fetchByUuid(eq(result.getUuid()))).thenReturn(Optional.of(search));
|
||||
|
||||
/*
|
||||
* Now call from a new bundle provider. This simulates a separate HTTP
|
||||
|
@ -293,9 +300,9 @@ public class SearchCoordinatorSvcImplTest {
|
|||
|
||||
List<Long> pids = createPidSequence(10, 100);
|
||||
SlowIterator iter = new SlowIterator(pids.iterator(), 2);
|
||||
when(mySearchBuider.createQuery(same(params), any(), any())).thenReturn(iter);
|
||||
when(mySearchBuilder.createQuery(same(params), any(), any())).thenReturn(iter);
|
||||
|
||||
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
|
||||
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
|
||||
|
||||
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null);
|
||||
assertNotNull(result.getUuid());
|
||||
|
@ -324,8 +331,8 @@ public class SearchCoordinatorSvcImplTest {
|
|||
search.setSearchType(SearchTypeEnum.SEARCH);
|
||||
search.setResourceType("Patient");
|
||||
|
||||
when(mySearchDao.findByUuid(eq(uuid))).thenReturn(search);
|
||||
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
|
||||
when(mySearchCacheSvc.fetchByUuid(eq(uuid))).thenReturn(Optional.of(search));
|
||||
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
|
||||
|
||||
PersistedJpaBundleProvider provider;
|
||||
List<IBaseResource> resources;
|
||||
|
@ -337,16 +344,13 @@ public class SearchCoordinatorSvcImplTest {
|
|||
// ignore
|
||||
}
|
||||
|
||||
when(mySearchResultDao.findWithSearchUuid(any(Search.class), any(Pageable.class))).thenAnswer(theInvocation -> {
|
||||
Pageable page = (Pageable) theInvocation.getArguments()[1];
|
||||
|
||||
when(mySearchResultCacheSvc.fetchResultPids(any(Search.class), anyInt(), anyInt())).thenAnswer(theInvocation -> {
|
||||
ArrayList<Long> results = new ArrayList<>();
|
||||
int max = (page.getPageNumber() * page.getPageSize()) + page.getPageSize();
|
||||
for (long i = page.getOffset(); i < max; i++) {
|
||||
for (long i = theInvocation.getArgument(1, Integer.class); i < theInvocation.getArgument(2, Integer.class); i++) {
|
||||
results.add(i + 10L);
|
||||
}
|
||||
|
||||
return new PageImpl<>(results);
|
||||
return results;
|
||||
});
|
||||
search.setStatus(SearchStatusEnum.FINISHED);
|
||||
}).start();
|
||||
|
@ -377,9 +381,9 @@ public class SearchCoordinatorSvcImplTest {
|
|||
params.add("name", new StringParam("ANAME"));
|
||||
|
||||
List<Long> pids = createPidSequence(10, 800);
|
||||
when(mySearchBuider.createQuery(same(params), any(), any())).thenReturn(new ResultIterator(pids.iterator()));
|
||||
when(mySearchBuilder.createQuery(same(params), any(), any())).thenReturn(new ResultIterator(pids.iterator()));
|
||||
|
||||
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
|
||||
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
|
||||
|
||||
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null);
|
||||
assertNull(result.getUuid());
|
||||
|
@ -398,10 +402,10 @@ public class SearchCoordinatorSvcImplTest {
|
|||
params.add("name", new StringParam("ANAME"));
|
||||
|
||||
List<Long> pids = createPidSequence(10, 800);
|
||||
when(mySearchBuider.createQuery(Mockito.same(params), any(), nullable(RequestDetails.class))).thenReturn(new ResultIterator(pids.iterator()));
|
||||
when(mySearchBuilder.createQuery(Mockito.same(params), any(), nullable(RequestDetails.class))).thenReturn(new ResultIterator(pids.iterator()));
|
||||
|
||||
pids = createPidSequence(10, 110);
|
||||
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(eq(pids), any(Collection.class), any(List.class), anyBoolean(), nullable(RequestDetails.class));
|
||||
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(eq(pids), any(Collection.class), any(List.class), anyBoolean(), nullable(RequestDetails.class));
|
||||
|
||||
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null);
|
||||
assertNull(result.getUuid());
@ -20,16 +20,17 @@ import ca.uhn.fhir.rest.api.MethodOutcome;
|
|||
import ca.uhn.fhir.rest.server.IResourceProvider;
|
||||
import ca.uhn.fhir.rest.server.RestfulServer;
|
||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||
import ca.uhn.fhir.test.utilities.JettyUtil;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.eclipse.jetty.server.Server;
|
||||
import org.eclipse.jetty.servlet.ServletContextHandler;
|
||||
import org.eclipse.jetty.servlet.ServletHolder;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.junit.*;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import javax.validation.constraints.NotNull;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
@ -38,8 +39,6 @@ import static ca.uhn.fhir.jpa.subscription.resthook.RestHookTestDstu3Test.logAll
|
|||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import ca.uhn.fhir.test.utilities.JettyUtil;
|
||||
|
||||
/**
|
||||
* Test the rest-hook subscriptions
|
||||
*/
@ -17,6 +17,7 @@ import ca.uhn.fhir.rest.api.MethodOutcome;
|
|||
import ca.uhn.fhir.rest.server.IResourceProvider;
|
||||
import ca.uhn.fhir.rest.server.RestfulServer;
|
||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||
import ca.uhn.fhir.test.utilities.JettyUtil;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.eclipse.jetty.server.Server;
|
||||
import org.eclipse.jetty.servlet.ServletContextHandler;
|
||||
|
@ -24,11 +25,11 @@ import org.eclipse.jetty.servlet.ServletHolder;
|
|||
import org.hl7.fhir.dstu3.model.*;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.junit.*;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.validation.constraints.NotNull;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
@ -40,8 +41,6 @@ import static org.awaitility.Awaitility.await;
|
|||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import ca.uhn.fhir.test.utilities.JettyUtil;
|
||||
|
||||
/**
|
||||
* Test the rest-hook subscriptions
|
||||
*/
@ -185,7 +185,11 @@ public class JdbcUtils {
|
|||
DatabaseMetaData metadata;
|
||||
try {
|
||||
metadata = connection.getMetaData();
|
||||
ResultSet indexes = metadata.getCrossReference(connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theTableName), connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theForeignTable));
|
||||
String catalog = connection.getCatalog();
|
||||
String schema = connection.getSchema();
|
||||
String parentTable = massageIdentifier(metadata, theTableName);
|
||||
String foreignTable = massageIdentifier(metadata, theForeignTable);
|
||||
ResultSet indexes = metadata.getCrossReference(catalog, schema, parentTable, catalog, schema, foreignTable);
|
||||
|
||||
Set<String> columnNames = new HashSet<>();
|
||||
while (indexes.next()) {
@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
@ -95,6 +95,13 @@ public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends B
|
|||
setColumnType(ColumnTypeEnum.BLOB, DriverTypeEnum.POSTGRES_9_4, "oid");
|
||||
setColumnType(ColumnTypeEnum.BLOB, DriverTypeEnum.MSSQL_2012, "varbinary(MAX)");
|
||||
|
||||
setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.H2_EMBEDDED, "clob");
|
||||
setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.DERBY_EMBEDDED, "clob(100000)");
|
||||
setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MARIADB_10_1, "longtext");
|
||||
setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MYSQL_5_7, "longtext");
|
||||
setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.ORACLE_12C, "clob");
|
||||
setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.POSTGRES_9_4, "text");
|
||||
setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MSSQL_2012, "varchar(MAX)");
|
||||
}
|
||||
|
||||
public ColumnTypeEnum getColumnType() {
|
||||
|
@ -214,8 +221,15 @@ public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends B
|
|||
Assert.isTrue(theColumnLength == null, "Must not supply a column length");
|
||||
return "blob";
|
||||
}
|
||||
};
|
||||
},
|
||||
|
||||
CLOB {
|
||||
@Override
|
||||
public String getDescriptor(Long theColumnLength) {
|
||||
Assert.isTrue(theColumnLength == null, "Must not supply a column length");
|
||||
return "clob";
|
||||
}
|
||||
};
|
||||
|
||||
public abstract String getDescriptor(Long theColumnLength);
@ -0,0 +1,91 @@
|
|||
package ca.uhn.fhir.jpa.migrate.taskdef;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2019 University Health Network
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
public class DropForeignKeyTask extends BaseTableTask<DropForeignKeyTask> {
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(DropForeignKeyTask.class);
|
||||
private String myConstraintName;
|
||||
private String myParentTableName;
|
||||
|
||||
public void setConstraintName(String theConstraintName) {
|
||||
myConstraintName = theConstraintName;
|
||||
}
|
||||
|
||||
public void setParentTableName(String theParentTableName) {
|
||||
myParentTableName = theParentTableName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void validate() {
|
||||
super.validate();
|
||||
|
||||
Validate.isTrue(isNotBlank(myConstraintName));
|
||||
Validate.isTrue(isNotBlank(myParentTableName));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void execute() throws SQLException {
|
||||
|
||||
Set<String> existing = JdbcUtils.getForeignKeys(getConnectionProperties(), myParentTableName, getTableName());
|
||||
if (!existing.contains(myConstraintName)) {
|
||||
ourLog.info("Don't have constraint named {} - No action performed", myConstraintName);
|
||||
return;
|
||||
}
|
||||
|
||||
String sql = null;
|
||||
String sql2 = null;
|
||||
switch (getDriverType()) {
|
||||
case MYSQL_5_7:
|
||||
// MySQL requires dropping both the constraint and the index that backs it
|
||||
sql = "alter table " + getTableName() + " drop constraint " + myConstraintName;
|
||||
sql2 = "alter table " + getTableName() + " drop index " + myConstraintName;
|
||||
break;
|
||||
case MARIADB_10_1:
|
||||
case POSTGRES_9_4:
|
||||
case DERBY_EMBEDDED:
|
||||
case H2_EMBEDDED:
|
||||
case ORACLE_12C:
|
||||
case MSSQL_2012:
|
||||
sql = "alter table " + getTableName() + " drop constraint " + myConstraintName;
|
||||
break;
|
||||
default:
|
||||
throw new IllegalStateException();
|
||||
}
|
||||
|
||||
executeSql(getTableName(), sql);
|
||||
if (isNotBlank(sql2)) {
|
||||
executeSql(getTableName(), sql2);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
@ -0,0 +1,101 @@
|
|||
package ca.uhn.fhir.jpa.migrate.taskdef;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Migration
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2019 University Health Network
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
public class DropIdGeneratorTask extends BaseTask<DropIdGeneratorTask> {
|
||||
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(DropIdGeneratorTask.class);
|
||||
private final String myGeneratorName;
|
||||
|
||||
public DropIdGeneratorTask(String theGeneratorName) {
|
||||
myGeneratorName = theGeneratorName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void validate() {
|
||||
Validate.notBlank(myGeneratorName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void execute() throws SQLException {
|
||||
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
|
||||
String sql = null;
|
||||
|
||||
switch (getDriverType()) {
|
||||
case MARIADB_10_1:
|
||||
case MYSQL_5_7:
|
||||
// MariaDB and MySQL emulate sequences with a backing table, so clear and drop that table instead
|
||||
if (tableNames.contains(myGeneratorName)) {
|
||||
|
||||
String initSql = "delete from " + myGeneratorName;
|
||||
executeSql(myGeneratorName, initSql);
|
||||
|
||||
String creationSql = "drop table " + myGeneratorName;
|
||||
executeSql(myGeneratorName, creationSql);
|
||||
|
||||
}
|
||||
break;
|
||||
case DERBY_EMBEDDED:
|
||||
case H2_EMBEDDED:
|
||||
sql = "drop sequence " + myGeneratorName;
|
||||
break;
|
||||
case POSTGRES_9_4:
|
||||
sql = "drop sequence " + myGeneratorName;
|
||||
break;
|
||||
case ORACLE_12C:
|
||||
sql = "drop sequence " + myGeneratorName;
|
||||
break;
|
||||
case MSSQL_2012:
|
||||
sql = "drop sequence " + myGeneratorName;
|
||||
break;
|
||||
default:
|
||||
throw new IllegalStateException();
|
||||
}
|
||||
|
||||
if (isNotBlank(sql)) {
|
||||
Set<String> sequenceNames =
|
||||
JdbcUtils.getSequenceNames(getConnectionProperties())
|
||||
.stream()
|
||||
.map(String::toLowerCase)
|
||||
.collect(Collectors.toSet());
|
||||
ourLog.debug("Currently have sequences: {}", sequenceNames);
|
||||
if (!sequenceNames.contains(myGeneratorName.toLowerCase())) {
|
||||
ourLog.info("Sequence {} does not exist - No action performed", myGeneratorName);
|
||||
return;
|
||||
}
|
||||
|
||||
executeSql(myGeneratorName, sql);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
@ -87,12 +87,19 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
.addForeignKey("FK_TRM_VSCD_VS_PID")
|
||||
.toColumn("VALUESET_PID")
|
||||
.references("TRM_VALUESET", "PID");
|
||||
// Drop HFJ_SEARCH_RESULT foreign keys
|
||||
version.onTable("HFJ_SEARCH_RESULT").dropForeignKey("FK_SEARCHRES_RES", "HFJ_RESOURCE");
|
||||
version.onTable("HFJ_SEARCH_RESULT").dropForeignKey("FK_SEARCHRES_SEARCH", "HFJ_SEARCH");
|
||||
|
||||
// TermValueSet
|
||||
version.startSectionWithMessage("Processing table: TRM_VALUESET");
|
||||
Builder.BuilderWithTableName termValueSetTable = version.onTable("TRM_VALUESET");
|
||||
termValueSetTable.addColumn("TOTAL_CONCEPTS").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
|
||||
termValueSetTable.addColumn("TOTAL_CONCEPT_DESIGNATIONS").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
|
||||
termValueSetTable
|
||||
.dropIndex("IDX_VALUESET_EXP_STATUS");
|
||||
|
||||
version.dropIdGenerator("SEQ_SEARCHPARM_ID");
|
||||
|
||||
// TermValueSetConcept
|
||||
version.startSectionWithMessage("Processing table: TRM_VALUESET_CONCEPT");
|
||||
|
@ -124,22 +131,54 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
|
||||
.renameColumn("mySourceValueSet", "SOURCE_VS", false, true)
|
||||
.renameColumn("myTargetValueSet", "TARGET_VS", false, true);
|
||||
version.onTable("TRM_CONCEPT_MAP_GROUP")
|
||||
.modifyColumn("CONCEPT_MAP_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CONCEPT_MAP_GROUP")
|
||||
.modifyColumn("SOURCE_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CONCEPT_MAP_GROUP")
|
||||
.modifyColumn("SOURCE_VS").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CONCEPT_MAP_GROUP")
|
||||
.modifyColumn("TARGET_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CONCEPT_MAP_GROUP")
|
||||
.modifyColumn("TARGET_VS").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
|
||||
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
|
||||
.renameColumn("mySystem", "SYSTEM_URL", false, true)
|
||||
.renameColumn("mySystemVersion", "SYSTEM_VERSION", false, true)
|
||||
.renameColumn("myValueSet", "VALUESET_URL", false, true);
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
|
||||
.modifyColumn("CONCEPT_MAP_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
|
||||
.modifyColumn("SOURCE_CODE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
|
||||
.modifyColumn("SYSTEM_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
|
||||
.modifyColumn("SYSTEM_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
|
||||
.modifyColumn("VALUESET_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
|
||||
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
|
||||
.renameColumn("mySystem", "SYSTEM_URL", false, true)
|
||||
.renameColumn("mySystemVersion", "SYSTEM_VERSION", false, true)
|
||||
.renameColumn("myValueSet", "VALUESET_URL", false, true);
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
|
||||
.modifyColumn("CONCEPT_MAP_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
|
||||
.modifyColumn("SYSTEM_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
|
||||
.modifyColumn("SYSTEM_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
|
||||
.modifyColumn("TARGET_CODE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
|
||||
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
|
||||
.modifyColumn("VALUESET_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
|
||||
version.onTable("TRM_CONCEPT")
|
||||
.renameColumn("CODE", "CODEVAL", false, true);
|
||||
|
||||
|
||||
|
||||
// TermValueSet
|
||||
version.startSectionWithMessage("Processing table: TRM_VALUESET");
|
||||
version.addIdGenerator("SEQ_VALUESET_PID");
|
||||
|
@ -159,6 +198,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
|
||||
version.onTable("TRM_VALUESET")
|
||||
.renameColumn("NAME", "VSNAME", true, true);
|
||||
version.onTable("TRM_VALUESET")
|
||||
.modifyColumn("RES_ID").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
|
||||
|
||||
Builder.BuilderWithTableName termValueSetTableChange = version.onTable("TRM_VALUESET");
|
||||
termValueSetTableChange.addColumn("EXPANSION_STATUS").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, TermValueSet.MAX_EXPANSION_STATUS_LENGTH);
|
||||
|
@ -215,6 +256,47 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
version.startSectionWithMessage("Processing table: TRM_CODESYSTEM_VER");
|
||||
Builder.BuilderWithTableName termCodeSystemVersionTable = version.onTable("TRM_CODESYSTEM_VER");
|
||||
termCodeSystemVersionTable.addColumn("CS_DISPLAY").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, TermCodeSystemVersion.MAX_VERSION_LENGTH);
|
||||
|
||||
// ResourceReindexJobEntry
|
||||
version.addIdGenerator("SEQ_RES_REINDEX_JOB");
|
||||
Builder.BuilderAddTableByColumns reindex = version.addTableByColumns("HFJ_RES_REINDEX_JOB", "PID");
|
||||
reindex.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
|
||||
reindex.addColumn("RES_TYPE").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, Constants.MAX_RESOURCE_NAME_LENGTH);
|
||||
reindex.addColumn("UPDATE_THRESHOLD_HIGH").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
|
||||
reindex.addColumn("JOB_DELETED").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.BOOLEAN);
|
||||
reindex.addColumn("UPDATE_THRESHOLD_LOW").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
|
||||
reindex.addColumn("SUSPENDED_UNTIL").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
|
||||
reindex.addColumn("REINDEX_COUNT").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
|
||||
|
||||
// Search
|
||||
version.onTable("HFJ_SEARCH")
|
||||
.addColumn("SEARCH_DELETED").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.BOOLEAN);
|
||||
version.onTable("HFJ_SEARCH")
|
||||
.modifyColumn("SEARCH_LAST_RETURNED").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
|
||||
version.onTable("HFJ_SEARCH")
|
||||
.addColumn("SEARCH_PARAM_MAP").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.BLOB);
|
||||
version.onTable("HFJ_SEARCH")
|
||||
.modifyColumn("SEARCH_UUID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, Search.UUID_COLUMN_LENGTH);
|
||||
|
||||
version.onTable("HFJ_SEARCH_PARM").dropThisTable();
|
||||
|
||||
version.onTable("HFJ_SPIDX_COORDS").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
|
||||
version.onTable("HFJ_SPIDX_DATE").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
|
||||
version.onTable("HFJ_SPIDX_STRING").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
|
||||
version.onTable("HFJ_SPIDX_STRING").addColumn("HASH_IDENTITY").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
|
||||
version.onTable("HFJ_SPIDX_STRING").addIndex("IDX_SP_STRING_HASH_IDENT").unique(false).withColumns("HASH_IDENTITY");
|
||||
version.onTable("HFJ_SPIDX_COORDS").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
|
||||
version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
|
||||
version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("HASH_UNITS_AND_VALPREFIX");
|
||||
version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("HASH_VALPREFIX");
|
||||
version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
|
||||
version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
|
||||
version.onTable("HFJ_SPIDX_URI").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
|
||||
version.onTable("HFJ_SPIDX_URI").modifyColumn("SP_URI").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 254);
|
||||
|
||||
version.onTable("TRM_CODESYSTEM").modifyColumn("CODE_SYSTEM_URI").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CODESYSTEM").modifyColumn("CS_NAME").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
version.onTable("TRM_CODESYSTEM_VER").modifyColumn("CS_VERSION_ID").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
}
|
||||
|
||||
|
||||
|
@ -245,6 +327,14 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
.addSql(DriverTypeEnum.MYSQL_5_7, " create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))")
|
||||
.addSql(DriverTypeEnum.ORACLE_12C, "create table HFJ_RES_REINDEX_JOB (PID number(19,0) not null, JOB_DELETED number(1,0) not null, RES_TYPE varchar2(255 char), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))");
|
||||
|
||||
version.onTable("TRM_CONCEPT_DESIG").addColumn("CS_VER_PID").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
|
||||
version.onTable("TRM_CONCEPT_DESIG").addForeignKey("FK_CONCEPTDESIG_CSV").toColumn("CS_VER_PID").references("TRM_CODESYSTEM_VER", "PID");
|
||||
|
||||
version.onTable("TRM_CONCEPT_PROPERTY").addColumn("CS_VER_PID").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
|
||||
version.onTable("TRM_CONCEPT_PROPERTY").addForeignKey("FK_CONCEPTPROP_CSV").toColumn("CS_VER_PID").references("TRM_CODESYSTEM_VER", "PID");
|
||||
|
||||
version.onTable("TRM_CONCEPT").addColumn("PARENT_PIDS").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.CLOB);
|
||||
|
||||
}
|
||||
|
||||
private void init350() {
|
||||
|
@ -299,6 +389,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
.addIndex("IDX_SP_DATE_HASH")
|
||||
.unique(false)
|
||||
.withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH");
|
||||
spidxDate
|
||||
.dropIndex("IDX_SP_DATE");
|
||||
spidxDate
|
||||
.addTask(new CalculateHashesTask()
|
||||
.setColumnName("HASH_IDENTITY")
|
||||
|
@ -531,6 +623,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_DESIG");
|
||||
version
|
||||
.addTableRawSql("TRM_CONCEPT_DESIG")
|
||||
.addSql(DriverTypeEnum.H2_EMBEDDED, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.H2_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
|
||||
.addSql(DriverTypeEnum.H2_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
|
||||
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
|
||||
|
@ -658,6 +753,10 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
|
||||
.addSql(DriverTypeEnum.MSSQL_2012, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
|
||||
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT");
|
||||
|
||||
version.onTable("HFJ_IDX_CMP_STRING_UNIQ").modifyColumn("IDX_STRING").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
|
||||
|
||||
|
||||
}
|
||||
|
||||
private Boolean columnToBoolean(Object theValue) {
|
||||
|
|
|
@ -117,6 +117,11 @@ public class BaseMigrationTasks<T extends Enum> {
|
|||
addTask(task);
|
||||
}
|
||||
|
||||
public void dropIdGenerator(String theIdGeneratorName) {
|
||||
DropIdGeneratorTask task = new DropIdGeneratorTask(theIdGeneratorName);
|
||||
addTask(task);
|
||||
}
|
||||
|
||||
public class BuilderAddTableRawSql {
|
||||
|
||||
private final AddTableRawSqlTask myTask;
|
||||
|
@ -241,6 +246,19 @@ public class BaseMigrationTasks<T extends Enum> {
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Drop an existing foreign key constraint from this table.
|
||||
* @param theFkName the name of the foreign key
|
||||
* @param theParentTableName the name of the table that exports the foreign key
|
||||
*/
|
||||
public void dropForeignKey(String theFkName, String theParentTableName) {
|
||||
DropForeignKeyTask task = new DropForeignKeyTask();
|
||||
task.setConstraintName(theFkName);
|
||||
task.setTableName(getTableName());
|
||||
task.setParentTableName(theParentTableName);
|
||||
addTask(task);
|
||||
}
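
For context, a minimal sketch of how this builder method can be driven from a migration-tasks class. The class name and target version below are illustrative assumptions; the table, constraint, and generator names are the ones the HFJ_SEARCH_RESULT migration elsewhere in this commit actually passes.

package ca.uhn.fhir.jpa.migrate.tasks;

import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;

public class ExampleMigrationTasks extends BaseMigrationTasks<VersionEnum> {
    public ExampleMigrationTasks() {
        // Version chosen only for illustration
        Builder version = forVersion(VersionEnum.V3_5_0);
        // FK_SEARCHRES_RES is exported by HFJ_RESOURCE and imported by HFJ_SEARCH_RESULT
        version.onTable("HFJ_SEARCH_RESULT").dropForeignKey("FK_SEARCHRES_RES", "HFJ_RESOURCE");
        // The dropIdGenerator(...) helper added in the same commit is registered the same way
        version.dropIdGenerator("SEQ_SEARCHPARM_ID");
    }
}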
|
||||
|
||||
public class BuilderAddIndexWithName {
|
||||
private final String myIndexName;
@ -1,12 +1,12 @@
|
|||
package ca.uhn.fhir.jpa.migrate.taskdef;
|
||||
|
||||
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.hasItem;
|
||||
import static org.junit.Assert.assertThat;
|
||||
|
||||
public class AddIndexTest extends BaseTest {
@ -0,0 +1,38 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.junit.Test;

import java.sql.SQLException;

import static org.hamcrest.Matchers.empty;
import static org.hamcrest.collection.IsCollectionWithSize.hasSize;
import static org.junit.Assert.assertThat;

public class DropForeignKeyTaskTest extends BaseTest {

    @Test
    public void testDropForeignKey() throws SQLException {
        executeSql("create table PARENT (PID bigint not null, TEXTCOL varchar(255), primary key (PID))");
        executeSql("create table CHILD (PID bigint not null, PARENTREF bigint)");
        executeSql("alter table CHILD add constraint FK_MOM foreign key (PARENTREF) references PARENT(PID)");

        assertThat(JdbcUtils.getForeignKeys(getConnectionProperties(), "PARENT", "CHILD"), hasSize(1));

        DropForeignKeyTask task = new DropForeignKeyTask();
        task.setTableName("CHILD");
        task.setParentTableName("PARENT");
        task.setConstraintName("FK_MOM");
        getMigrator().addTask(task);

        getMigrator().migrate();

        assertThat(JdbcUtils.getForeignKeys(getConnectionProperties(), "PARENT", "CHILD"), empty());

        // Make sure additional calls don't crash
        getMigrator().migrate();
        getMigrator().migrate();
    }

}
@ -0,0 +1,41 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.Test;

import java.sql.SQLException;

import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;

public class DropIdGeneratorTaskTest extends BaseTest {

    @Test
    public void testDropIdGenerator() throws SQLException {
        executeSql("create sequence SEQ_FOO start with 1 increment by 50");
        assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), containsInAnyOrder("SEQ_FOO"));

        MyMigrationTasks migrator = new MyMigrationTasks();
        getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
        getMigrator().migrate();

        assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), empty());
    }

    private static class MyMigrationTasks extends BaseMigrationTasks<VersionEnum> {

        public MyMigrationTasks() {
            Builder v = forVersion(VersionEnum.V3_5_0);
            v.dropIdGenerator("SEQ_FOO");
        }

    }

}
@ -877,6 +877,11 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
|
|||
public boolean conformsToProfile(Object appContext, Base item, String url) throws FHIRException {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueSet resolveValueSet(Object appContext, String url) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static <T extends Enum<?>> String extractSystem(Enumeration<T> theBoundCode) {
@ -21,12 +21,10 @@ package ca.uhn.fhir.jpa.searchparam.extractor;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.context.ConfigurationException;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.jpa.model.entity.*;
|
||||
import ca.uhn.fhir.jpa.model.util.StringNormalizer;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParamConstants;
|
||||
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import com.google.common.annotations.VisibleForTesting;
@ -18,7 +18,10 @@ import org.hl7.fhir.instance.model.api.IIdType;
|
|||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.messaging.*;
|
||||
import org.springframework.messaging.Message;
|
||||
import org.springframework.messaging.MessageChannel;
|
||||
import org.springframework.messaging.MessageHandler;
|
||||
import org.springframework.messaging.MessagingException;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.Collection;
@ -848,7 +848,7 @@ public class RestfulServer extends HttpServlet implements IRestfulServer<Servlet
|
|||
@SuppressWarnings("WeakerAccess")
|
||||
protected void handleRequest(RequestTypeEnum theRequestType, HttpServletRequest theRequest, HttpServletResponse theResponse) throws ServletException, IOException {
|
||||
String fhirServerBase;
|
||||
ServletRequestDetails requestDetails = new ServletRequestDetails(getInterceptorService());
|
||||
ServletRequestDetails requestDetails = newRequestDetails();
|
||||
requestDetails.setServer(this);
|
||||
requestDetails.setRequestType(theRequestType);
|
||||
requestDetails.setServletRequest(theRequest);
|
||||
|
@ -1095,6 +1095,10 @@ public class RestfulServer extends HttpServlet implements IRestfulServer<Servlet
|
|||
}
|
||||
}
|
||||
|
||||
protected ServletRequestDetails newRequestDetails() {
|
||||
return new ServletRequestDetails(getInterceptorService());
|
||||
}
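
Extracting this factory method means a RestfulServer subclass can now substitute its own ServletRequestDetails. A minimal sketch follows; the subclass name, the user-data key, and the ServletRequestDetails package location are assumptions for illustration, not part of this commit.

import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;

public class MyServer extends RestfulServer {
    @Override
    protected ServletRequestDetails newRequestDetails() {
        // Start from the default instance created by RestfulServer
        ServletRequestDetails retVal = super.newRequestDetails();
        // Illustrative only: stash extra per-request state for later interceptors to read
        retVal.getUserData().put("myServer.requestReceivedAt", System.currentTimeMillis());
        return retVal;
    }
}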
|
||||
|
||||
protected void addRequestIdToResponse(ServletRequestDetails theRequestDetails, String theRequestId) {
|
||||
theRequestDetails.getResponse().addHeader(Constants.HEADER_REQUEST_ID, theRequestId);
|
||||
}
@ -157,9 +157,6 @@ public class ResourceParameter implements IParameter {
|
|||
}
|
||||
}
|
||||
if (isBlank(ctValue)) {
|
||||
/*
|
||||
* If the client didn't send a content type, try to guess
|
||||
*/
|
||||
String body;
|
||||
try {
|
||||
body = IOUtils.toString(requestReader);
|
||||
|
@ -170,12 +167,9 @@ public class ResourceParameter implements IParameter {
|
|||
if (isBlank(body)) {
|
||||
return null;
|
||||
}
|
||||
encoding = EncodingEnum.detectEncodingNoDefault(body);
|
||||
if (encoding == null) {
|
||||
|
||||
String msg = ctx.getLocalizer().getMessage(ResourceParameter.class, "noContentTypeInRequest", restOperationType);
|
||||
throw new InvalidRequestException(msg);
|
||||
}
|
||||
requestReader = new InputStreamReader(new ByteArrayInputStream(theRequest.loadRequestContents()), charset);
|
||||
} else {
|
||||
String msg = ctx.getLocalizer().getMessage(ResourceParameter.class, "invalidContentTypeInRequest", ctValue, restOperationType);
|
||||
throw new InvalidRequestException(msg);
@ -336,6 +336,18 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
|
|||
expandedValueSet = new ValueSetExpansionOutcome(expansion);
|
||||
}
|
||||
|
||||
/*
|
||||
* We'll just accept all mimetypes, since this is pretty much impossible to exhaustively
|
||||
* validate.
|
||||
*/
|
||||
if (theVs != null && "http://hl7.org/fhir/ValueSet/mimetypes".equals(theVs.getUrl())) {
|
||||
ConceptDefinitionComponent definition = new ConceptDefinitionComponent();
|
||||
definition.setCode(wantCode);
|
||||
definition.setDisplay(wantCode);
|
||||
ValidationResult retVal = new ValidationResult(definition);
|
||||
return retVal;
|
||||
}
|
||||
|
||||
if (expandedValueSet == null) {
|
||||
expandedValueSet = expand(theVs, null);
|
||||
}
|
||||
|
|
|
@ -389,6 +389,11 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
|
|||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLinkForUrl(String corePath, String url) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getTypeNames() {
|
||||
throw new UnsupportedOperationException();
@ -77,6 +77,23 @@ public class CreateR4Test {
|
|||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateFailsIfNoContentTypeProvided() throws Exception {
|
||||
|
||||
HttpPost httpPost = new HttpPost("http://localhost:" + ourPort + "/Patient");
|
||||
httpPost.setEntity(new StringEntity("{\"resourceType\":\"Patient\", \"id\":\"999\", \"status\":\"active\"}", (ContentType) null));
|
||||
try (CloseableHttpResponse status = ourClient.execute(httpPost)) {
|
||||
|
||||
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
|
||||
ourLog.info("Response was:\n{}", responseContent);
|
||||
|
||||
assertEquals(400, status.getStatusLine().getStatusCode());
|
||||
assertThat(responseContent, containsString("No Content-Type header was provided in the request. This is required for \\\"CREATE\\\" operation"));
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* #472
|
||||
*/
@ -232,9 +232,9 @@ public class OperationGenericServer2R4Test {
|
|||
|
||||
HttpGet httpPost = new HttpGet("http://localhost:" + myPort + "/Patient/123/$OP_INSTANCE");
|
||||
try (CloseableHttpResponse status = ourClient.execute(httpPost)) {
|
||||
assertEquals(200, status.getStatusLine().getStatusCode());
|
||||
String response = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
ourLog.info(response);
|
||||
assertEquals(200, status.getStatusLine().getStatusCode());
|
||||
status.getEntity().getContent().close();
|
||||
|
||||
assertEquals("123", ourLastId.getIdPart());
|
@ -66,7 +66,8 @@ public class GraphQLEngineTest {
|
|||
engine.setGraphQL(Parser.parse("{valueQuantity{value,unit}}"));
|
||||
engine.execute();
|
||||
|
||||
ObjectValue output = engine.getOutput();
|
||||
GraphQLResponse output = engine.getOutput();
|
||||
output.setWriteWrapper(false);
|
||||
StringBuilder outputBuilder = new StringBuilder();
|
||||
output.write(outputBuilder, 0, "\n");
|
||||
|
||||
|
@ -100,7 +101,8 @@ public class GraphQLEngineTest {
|
|||
engine.setServices(createStorageServices());
|
||||
engine.execute();
|
||||
|
||||
ObjectValue output = engine.getOutput();
|
||||
GraphQLResponse output = engine.getOutput();
|
||||
output.setWriteWrapper(false);
|
||||
StringBuilder outputBuilder = new StringBuilder();
|
||||
output.write(outputBuilder, 0, "\n");
@ -438,4 +438,9 @@ public final class HapiWorkerContext implements IWorkerContext, ValueSetExpander
|
|||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLinkForUrl(String corePath, String url) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -339,6 +339,11 @@ public class FhirInstanceValidator extends BaseValidatorBridge implements IValid
|
|||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLinkForUrl(String corePath, String url) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void generateSnapshot(org.hl7.fhir.r5.model.StructureDefinition p) throws FHIRException {
|
||||
// nothing yet
@ -522,6 +522,11 @@ public class FhirInstanceValidator extends BaseValidatorBridge implements IValid
|
|||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLinkForUrl(String corePath, String url) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Parameters getExpansionParameters() {
|
||||
return myExpansionProfile;
@ -669,6 +669,11 @@ public class FhirInstanceValidator extends org.hl7.fhir.r4.hapi.validation.BaseV
|
|||
return convertValidationResult(result);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLinkForUrl(String corePath, String url) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class ResourceKey {
@ -145,6 +145,12 @@ public class SnapshotGeneratingValidationSupport implements IValidationSupport {
|
|||
public boolean prependLinks() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLinkForUrl(String corePath, String url) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -282,6 +282,11 @@ public class FhirInstanceValidator extends org.hl7.fhir.r5.hapi.validation.BaseV
|
|||
// nothing yet
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLinkForUrl(String corePath, String url) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.hl7.fhir.r5.model.Parameters getExpansionParameters() {
|
||||
return myExpansionProfile;
|
||||
|
|
|
@ -131,6 +131,11 @@ public class SnapshotGeneratingValidationSupport implements IValidationSupport {
|
|||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLinkForUrl(String corePath, String url) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public BindingResolution resolveBinding(StructureDefinition def, String url, String path) throws FHIRException {
|
||||
return null;
|
@ -562,7 +562,7 @@
|
|||
|
||||
<properties>
|
||||
|
||||
<fhir_core_version>4.0.2-SNAPSHOT</fhir_core_version>
|
||||
<fhir_core_version>4.0.8-SNAPSHOT</fhir_core_version>
|
||||
<ucum_version>1.0.2</ucum_version>
|
||||
|
||||
<!-- configure timestamp in MANIFEST.MF for maven-war-provider -->
@ -73,6 +73,10 @@
The informational message returned in an OperationOutcome when a delete failed due to cascades not being enabled
contained an incorrect example. This has been corrected.
</action>
<action type="change">
Two foreign keys have been dropped from the HFJ_SEARCH_RESULT table used by the FHIR search query cache. These
constraints did not add value and caused unnecessary contention when used under high load.
</action>
<action type="change">
An inefficient regular expression in UrlUtil was replaced with a much more efficient hand-written
checker. This regex was causing a noticeable performance drop when feeding large numbers of transactions
@ -106,6 +110,17 @@
A note has been added to the downloads page explaining the removal of the hapi-fhir-utilities
module. Thanks to Andrew Fitzgerald for the PR!
</action>
<action type="change">
REST servers will no longer try to guess the content type for HTTP requests where a body
is provided but no Content-Type header is included. These requests are invalid, and will now
result in an HTTP 400. This change corrects an error where some interceptors (notably
the RequestValidatingInterceptor, but not including any HAPI FHIR security interceptors)
could be bypassed if a Content-Type header was not included.
</action>
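
The new behaviour is exercised by the CreateR4Test case added earlier in this commit. As a client-side illustration (the port, class name, and resource body are hypothetical), a request that carries a payload but omits the Content-Type header is now rejected outright:

import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

import java.nio.charset.StandardCharsets;

public class NoContentTypeExample {
    public static void main(String[] args) throws Exception {
        try (CloseableHttpClient client = HttpClients.createDefault()) {
            HttpPost post = new HttpPost("http://localhost:8000/Patient");
            // Passing a null ContentType leaves the Content-Type header off entirely
            post.setEntity(new StringEntity("{\"resourceType\":\"Patient\"}", (ContentType) null));
            try (CloseableHttpResponse response = client.execute(post)) {
                // Expect HTTP 400 with an OperationOutcome explaining the missing header
                System.out.println(response.getStatusLine().getStatusCode());
                System.out.println(IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8));
            }
        }
    }
}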
<action type="fix">
The GraphQL provider did not wrap the response in a "data" element as described in the FHIR
specification. This has been corrected.
</action>
</release>
<release version="4.0.1" date="2019-09-03" description="Igloo (Point Release)">
<action type="fix">