mirror of https://github.com/hapifhir/hapi-fhir.git
synced 2025-03-25 01:18:37 +00:00

Reduce memory overhead of searches (#6471)

This commit is contained in:
parent 7913fca90d
commit 265a5cd9bf
@@ -0,0 +1,8 @@
---
type: perf
issue: 6469
title: "Searching for a large number of resources can use a lot of
  memory, due to the nature of deduplication of results in memory.
  We will instead push this responsibility to the db to
  reduce this overhead.
  "
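The change replaces in-memory deduplication of result PIDs with deduplication in the database. A minimal sketch of the two strategies, for orientation only (the class below is hypothetical, not HAPI FHIR code; only the HFJ table and column names come from the diff that follows):

// Illustrative sketch: contrasts in-memory vs. database deduplication.
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

class DeduplicationSketch {
    // Before: deduplicate result PIDs in application memory.
    // The Set grows with the result count, which is the overhead this change removes.
    static List<Long> dedupeInMemory(List<Long> pids) {
        Set<Long> unique = new LinkedHashSet<>(pids); // keeps first-seen order
        return new ArrayList<>(unique);
    }

    // After: let the database return each PID once by grouping on it,
    // so the JVM never holds the full, duplicated result set.
    static String dedupeInDatabaseSql(String baseSelect) {
        return baseSelect + " GROUP BY t0.RES_ID";
    }
}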
@@ -376,6 +376,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
myContext.getResourceDefinition(theResourceType).getImplementingClass();
final ISearchBuilder<JpaPid> sb = mySearchBuilderFactory.newSearchBuilder(theResourceType, resourceTypeClass);
sb.setFetchSize(mySyncSize);
sb.setRequireTotal(theParams.getCount() != null);

final Integer loadSynchronousUpTo = getLoadSynchronousUpToOrNull(theCacheControlDirective);
boolean isOffsetQuery = theParams.isOffsetQuery();
@@ -393,7 +394,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {

try {
return direct.get();

} catch (ResourceNotFoundInIndexException theE) {
// some resources were not found in the index, so we will log this and resort to a JPA search
ourLog.warn(
@@ -401,6 +401,12 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
}
}

// we need a max to fetch for synchronous searches;
// otherwise we'll explode memory.
Integer maxToLoad = getSynchronousMaxResultsToFetch(theParams, loadSynchronousUpTo);
ourLog.debug("Setting a max fetch value of {} for synchronous search", maxToLoad);
sb.setMaxResultsToFetch(maxToLoad);

ourLog.debug("Search {} is loading in synchronous mode", searchUuid);
return mySynchronousSearchSvc.executeQuery(
theParams, theRequestDetails, searchUuid, sb, loadSynchronousUpTo, theRequestPartitionId);
@@ -434,6 +440,35 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
return retVal;
}

/**
 * The max results to return if this is a synchronous search.
 *
 * We'll look in this order:
 * * load synchronous up to (on params)
 * * param count (+ offset)
 * * StorageSettings fetch size default max
 * * StorageSettings internal synchronous search size
 */
private Integer getSynchronousMaxResultsToFetch(SearchParameterMap theParams, Integer theLoadSynchronousUpTo) {
if (theLoadSynchronousUpTo != null) {
return theLoadSynchronousUpTo;
}

if (theParams.getCount() != null) {
int valToReturn = theParams.getCount() + 1;
if (theParams.getOffset() != null) {
valToReturn += theParams.getOffset();
}
return valToReturn;
}

if (myStorageSettings.getFetchSizeDefaultMaximum() != null) {
return myStorageSettings.getFetchSizeDefaultMaximum();
}

return myStorageSettings.getInternalSynchronousSearchSize();
}

private void validateSearch(SearchParameterMap theParams) {
validateIncludes(theParams.getIncludes(), Constants.PARAM_INCLUDE);
validateIncludes(theParams.getRevIncludes(), Constants.PARAM_REVINCLUDE);
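A worked example of the fallback order above, with hypothetical values (the extra row beyond the requested count lets the server tell that more results exist, per the SearchTask comment later in this diff):

// Worked example only; mirrors getSynchronousMaxResultsToFetch, not a new API.
class SynchronousMaxExample {
    static int example() {
        // loadSynchronousUpTo is null here, so fall through to the count branch.
        // With no count either, StorageSettings' fetch size default maximum would
        // apply, and failing that, the internal synchronous search size.
        int count = 50;   // theParams.getCount()
        int offset = 20;  // theParams.getOffset()
        return count + 1 + offset; // 71: count, plus one sentinel row, plus the offset
    }
}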
@@ -249,7 +249,7 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
resources, theRequestDetails, myInterceptorBroadcaster);

SimpleBundleProvider bundleProvider = new SimpleBundleProvider(resources);
if (hasACount) {
if (hasACount && theSb.requiresTotal()) {
bundleProvider.setTotalResourcesRequestedReturned(receivedResourceCount);
}
if (theParams.isOffsetQuery()) {
@@ -61,6 +61,7 @@ import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
@@ -206,11 +207,19 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private SearchParameterMap myParams;
private String mySearchUuid;
private int myFetchSize;
private Integer myMaxResultsToFetch;

private boolean myRequiresTotal;

/**
 * @see SearchBuilder#setDeduplicateInDatabase(boolean)
 */
private Set<JpaPid> myPidSet;

private boolean myHasNextIteratorQuery = false;
private RequestPartitionId myRequestPartitionId;

private SearchQueryProperties mySearchProperties;

@Autowired(required = false)
private IFulltextSearchSvc myFulltextSearchSvc;
@@ -261,6 +270,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myDaoRegistry = theDaoRegistry;
myContext = theContext;
myIdHelperService = theIdHelperService;

mySearchProperties = new SearchQueryProperties();
}

@VisibleForTesting
@@ -270,7 +281,21 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

@Override
public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
myMaxResultsToFetch = theMaxResultsToFetch;
mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch);
}

public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) {
mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB);
}

@Override
public void setRequireTotal(boolean theRequireTotal) {
myRequiresTotal = theRequireTotal;
}

@Override
public boolean requiresTotal() {
return myRequiresTotal;
}

private void searchForIdsWithAndOr(
@@ -279,6 +304,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
@Nonnull SearchParameterMap theParams,
RequestDetails theRequest) {
myParams = theParams;
mySearchProperties.setSortSpec(myParams.getSort());

// Remove any empty parameters
theParams.clean();
@@ -349,7 +375,12 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
return myFulltextSearchSvc.count(myResourceName, theParams.clone());
}

List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), null, null, null, true, theRequest, null);
SearchQueryProperties properties = mySearchProperties.clone();
properties.setDoCountOnlyFlag(true);
properties.setSortSpec(null); // counts don't require sorts
properties.setMaxResultsRequested(null);
properties.setOffset(null);
List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null);
if (queries.isEmpty()) {
return 0L;
} else {
@@ -389,19 +420,24 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myCriteriaBuilder = myEntityManager.getCriteriaBuilder();
// we mutate the params. Make a private copy.
myParams = theParams.clone();
mySearchProperties.setSortSpec(myParams.getSort());
mySearchUuid = theSearchUuid;
myRequestPartitionId = theRequestPartitionId;
}

/**
 * The query created can be either a count query or the
 * actual query.
 * This is why it takes a SearchQueryProperties object
 * (and doesn't use the local version of it).
 * The properties may differ slightly depending on which query this is.
 */
private List<ISearchQueryExecutor> createQuery(
SearchParameterMap theParams,
SortSpec sort,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchProperties,
RequestDetails theRequest,
SearchRuntimeDetails theSearchRuntimeDetails) {

ArrayList<ISearchQueryExecutor> queries = new ArrayList<>();

if (checkUseHibernateSearch()) {
@@ -412,7 +448,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
List<JpaPid> fulltextMatchIds = null;
int resultCount = 0;
if (myParams.isLastN()) {
fulltextMatchIds = executeLastNAgainstIndex(theMaximumResults);
fulltextMatchIds = executeLastNAgainstIndex(theSearchProperties.getMaxResultsRequested());
resultCount = fulltextMatchIds.size();
} else if (myParams.getEverythingMode() != null) {
fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
@@ -469,8 +505,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

if (canSkipDatabase) {
ourLog.trace("Query finished after HSearch. Skip db query phase");
if (theMaximumResults != null) {
fulltextExecutor = SearchQueryExecutors.limited(fulltextExecutor, theMaximumResults);
if (theSearchProperties.hasMaxResultsRequested()) {
fulltextExecutor = SearchQueryExecutors.limited(
fulltextExecutor, theSearchProperties.getMaxResultsRequested());
}
queries.add(fulltextExecutor);
} else {
@@ -483,13 +520,11 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
SearchBuilder.getMaximumPageSize(),
// for each list of (SearchBuilder.getMaximumPageSize())
// we create a chunked query and add it to 'queries'
t -> doCreateChunkedQueries(
theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries));
t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries));
}
} else {
// do everything in the database.
createChunkedQuery(
theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null, queries);
createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries);
}

return queries;
@@ -584,16 +619,15 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private void doCreateChunkedQueries(
SearchParameterMap theParams,
List<JpaPid> thePids,
Integer theOffset,
SortSpec sort,
boolean theCount,
SearchQueryProperties theSearchQueryProperties,
RequestDetails theRequest,
ArrayList<ISearchQueryExecutor> theQueries) {

if (thePids.size() < getMaximumPageSize()) {
thePids = normalizeIdListForInClause(thePids);
}
createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids, theQueries);
theSearchQueryProperties.setMaxResultsRequested(thePids.size());
createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries);
}

/**
@@ -643,33 +677,22 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

private void createChunkedQuery(
SearchParameterMap theParams,
SortSpec sort,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchProperties,
RequestDetails theRequest,
List<JpaPid> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
if (myParams.getEverythingMode() != null) {
createChunkedQueryForEverythingSearch(
theRequest,
theParams,
theOffset,
theMaximumResults,
theCountOnlyFlag,
thePidList,
theSearchQueryExecutors);
theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors);
} else {
createChunkedQueryNormalSearch(
theParams, sort, theOffset, theCountOnlyFlag, theRequest, thePidList, theSearchQueryExecutors);
theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors);
}
}

private void createChunkedQueryNormalSearch(
SearchParameterMap theParams,
SortSpec sort,
Integer theOffset,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchProperties,
RequestDetails theRequest,
List<JpaPid> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
@@ -681,7 +704,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myResourceName,
mySqlBuilderFactory,
myDialectProvider,
theCountOnlyFlag);
theSearchProperties.isDoCountOnlyFlag());
QueryStack queryStack3 = new QueryStack(
theRequest,
theParams,
@@ -759,9 +782,12 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}

/*
* If offset is present, we want deduplicate the results by using GROUP BY
* If offset is present, we want to deduplicate the results by using GROUP BY;
* OR
* if the MaxResultsToFetch is null, we are requesting "everything",
* so we'll let the db do the deduplication (instead of in-memory)
*/
if (theOffset != null) {
if (theSearchProperties.isDeduplicateInDatabase()) {
queryStack3.addGrouping();
queryStack3.setUseAggregate(true);
}
@@ -772,24 +798,27 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* If we have a sort, we wrap the criteria search (the search that actually
* finds the appropriate resources) in an outer search which is then sorted
*/
if (sort != null) {
assert !theCountOnlyFlag;
if (theSearchProperties.hasSort()) {
assert !theSearchProperties.isDoCountOnlyFlag();

createSort(queryStack3, sort, theParams);
createSort(queryStack3, theSearchProperties.getSortSpec(), theParams);
}

/*
* Now perform the search
*/
executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder);
}

private void executeSearch(
Integer theOffset, List<ISearchQueryExecutor> theSearchQueryExecutors, SearchQueryBuilder sqlBuilder) {
GeneratedSql generatedSql = sqlBuilder.generate(theOffset, myMaxResultsToFetch);
SearchQueryProperties theProperties,
List<ISearchQueryExecutor> theSearchQueryExecutors,
SearchQueryBuilder sqlBuilder) {
GeneratedSql generatedSql =
sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested());
if (!generatedSql.isMatchNothing()) {
SearchQueryExecutor executor =
mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, myMaxResultsToFetch);
mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested());
theSearchQueryExecutors.add(executor);
}
}
@@ -797,9 +826,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private void createChunkedQueryForEverythingSearch(
RequestDetails theRequest,
SearchParameterMap theParams,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchQueryProperties,
List<JpaPid> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {

@@ -811,7 +838,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
null,
mySqlBuilderFactory,
myDialectProvider,
theCountOnlyFlag);
theSearchQueryProperties.isDoCountOnlyFlag());

QueryStack queryStack3 = new QueryStack(
theRequest,
@@ -822,7 +849,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
mySearchParamRegistry,
myPartitionSettings);

JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theMaximumResults);
JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested());

Set<JpaPid> targetPids = new HashSet<>();
if (myParams.get(IAnyResource.SP_RES_ID) != null) {
@@ -845,8 +872,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myResourceName,
mySqlBuilderFactory,
myDialectProvider,
theCountOnlyFlag);
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch);
theSearchQueryProperties.isDoCountOnlyFlag());
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(
theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested());
String sql = allTargetsSql.getSql();
Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);

@@ -872,7 +900,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* If offset is present, we want to deduplicate the results by using GROUP BY
* ORDER BY is required to make sure we return unique results for each page
*/
if (theOffset != null) {
if (theSearchQueryProperties.hasOffset()) {
queryStack3.addGrouping();
queryStack3.addOrdering();
queryStack3.setUseAggregate(true);
@@ -881,7 +909,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
/*
* Now perform the search
*/
executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder);
}

private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) {
@@ -2655,15 +2683,15 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

// If we don't have a query yet, create one
if (myResultsIterator == null) {
if (myMaxResultsToFetch == null) {
myMaxResultsToFetch = calculateMaxResultsToFetch();
if (!mySearchProperties.hasMaxResultsRequested()) {
mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch());
}

/*
* assigns the results iterator
* and populates the myQueryList.
*/
initializeIteratorQuery(myOffset, myMaxResultsToFetch);
initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
}

if (myNext == null) {
@@ -2694,23 +2722,54 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}

if (nextPid != null) {
if (myPidSet.add(nextPid) && doNotSkipNextPidForEverything()) {
myNext = nextPid;
myNonSkipCount++;
break;
if (!myPidSet.contains(nextPid)) {
if (!mySearchProperties.isDeduplicateInDatabase()) {
/*
* We only add to the set if we aren't fetching "everything";
* otherwise, we let the de-duplication happen in the database
* (see createChunkedQueryNormalSearch above), because it
* saves memory that way.
*/
myPidSet.add(nextPid);
}
if (doNotSkipNextPidForEverything()) {
myNext = nextPid;
myNonSkipCount++;
break;
}
} else {
mySkipCount++;
}
}

if (!myResultsIterator.hasNext()) {
if (myMaxResultsToFetch != null && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) {
if (mySearchProperties.hasMaxResultsRequested()
&& (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) {
if (mySkipCount > 0 && myNonSkipCount == 0) {

sendProcessingMsgAndFirePerformanceHook();
// need the next iterator; increase the maxsize
// (we should always do this)
int maxResults = mySearchProperties.getMaxResultsRequested() + 1000;
mySearchProperties.setMaxResultsRequested(maxResults);

myMaxResultsToFetch += 1000;
initializeIteratorQuery(myOffset, myMaxResultsToFetch);
if (!mySearchProperties.isDeduplicateInDatabase()) {
// if we're not using the database to deduplicate
// we should recheck our memory usage
// the prefetch size check is future proofing
int prefetchSize = myStorageSettings
.getSearchPreFetchThresholds()
.size();
if (prefetchSize > 0) {
if (myStorageSettings
.getSearchPreFetchThresholds()
.get(prefetchSize - 1)
< mySearchProperties.getMaxResultsRequested()) {
mySearchProperties.setDeduplicateInDatabase(true);
}
}
}

initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
}
}
}
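The threshold check above is what flips a long-running search from in-memory to database deduplication. A standalone sketch of that decision, simplified from the code (hypothetical helper, not HAPI FHIR API):

import java.util.List;

class DeduplicationDecisionSketch {
    // Mirrors the check above: once the requested fetch size exceeds the last
    // configured prefetch threshold, stop growing myPidSet and let the
    // database deduplicate instead.
    static boolean shouldDeduplicateInDatabase(List<Integer> preFetchThresholds, int maxResultsRequested) {
        if (preFetchThresholds.isEmpty()) {
            return false; // no thresholds configured; keep deduplicating in memory
        }
        return preFetchThresholds.get(preFetchThresholds.size() - 1) < maxResultsRequested;
    }
}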
@@ -2740,7 +2799,11 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
}

mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size());
if (!mySearchProperties.hasMaxResultsRequested()) {
mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount);
} else {
mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size());
}

} finally {
// search finished - fire hooks
@@ -2794,7 +2857,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
String msg = "Pass completed with no matching results seeking rows "
+ myPidSet.size() + "-" + mySkipCount
+ ". This indicates an inefficient query! Retrying with new max count of "
+ myMaxResultsToFetch;
+ mySearchProperties.getMaxResultsRequested();
firePerformanceWarning(myRequest, msg);
}

@@ -2809,8 +2872,14 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (myParams.getEverythingMode() != null) {
offset = 0;
}
myQueryList = createQuery(
myParams, mySort, offset, theMaxResultsToFetch, false, myRequest, mySearchRuntimeDetails);

SearchQueryProperties properties = mySearchProperties.clone();
properties
.setOffset(offset)
.setMaxResultsRequested(theMaxResultsToFetch)
.setDoCountOnlyFlag(false)
.setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null);
myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails);
}

mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());
@@ -0,0 +1,124 @@
package ca.uhn.fhir.jpa.search.builder.models;

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.rest.api.SortSpec;

public class SearchQueryProperties {

/**
 * True if this query is only to fetch the count (and not any results).
 *
 * True means this is a count only query
 */
private boolean myDoCountOnlyFlag;
/**
 * Whether or not we do deduplication of results in memory
 * (using a hashset, etc), or push this to the database
 * (using GROUP BY, etc).
 *
 * True means use the database
 */
private boolean myDeduplicateInDB;

/**
 * The maximum number of results to fetch (when we want it limited).
 * Can be null if we are fetching everything or paging.
 */
private Integer myMaxResultsRequested;
/**
 * The offset for the results to fetch.
 *
 * null if the first page, some number if it's a later page
 */
private Integer myOffset;

/**
 * The sort spec for this search
 */
private SortSpec mySortSpec;

public boolean isDoCountOnlyFlag() {
return myDoCountOnlyFlag;
}

public SearchQueryProperties setDoCountOnlyFlag(boolean theDoCountOnlyFlag) {
myDoCountOnlyFlag = theDoCountOnlyFlag;
return this;
}

public boolean isDeduplicateInDatabase() {
return myDeduplicateInDB;
}

/**
 * Set of PIDs of results that have already been returned in a search.
 *
 * Searches use pre-fetch thresholds to avoid returning every result in the db
 * (see {@link JpaStorageSettings mySearchPreFetchThresholds}). These threshold values
 * dictate the usage of this set.
 *
 * Results from searches returning *less* than a prefetch threshold are put into this set
 * for 2 purposes:
 * 1) skipping already seen resources. ie, client requesting next "page" of
 * results should skip previously returned results
 * 2) deduplication of returned results. ie, searches can return duplicate resources (due to
 * sort and filter criteria), so this set will be used to avoid returning duplicate results.
 *
 * NOTE: if a client requests *more* resources than *all* prefetch thresholds,
 * we push the work of "deduplication" to the database. No newly seen resource
 * will be stored in this set (to avoid this set exploding in size and the JVM running out of memory).
 * We will, however, still use it to skip previously seen results.
 */
public SearchQueryProperties setDeduplicateInDatabase(boolean theDeduplicateInDBFlag) {
myDeduplicateInDB = theDeduplicateInDBFlag;
return this;
}

public Integer getMaxResultsRequested() {
return myMaxResultsRequested;
}

public SearchQueryProperties setMaxResultsRequested(Integer theMaxResultsRequested) {
myMaxResultsRequested = theMaxResultsRequested;
return this;
}

public boolean hasMaxResultsRequested() {
return myMaxResultsRequested != null;
}

public Integer getOffset() {
return myOffset;
}

public boolean hasOffset() {
return myOffset != null;
}

public SearchQueryProperties setOffset(Integer theOffset) {
myOffset = theOffset;
return this;
}

public SortSpec getSortSpec() {
return mySortSpec;
}

public boolean hasSort() {
return mySortSpec != null;
}

public SearchQueryProperties setSortSpec(SortSpec theSortSpec) {
mySortSpec = theSortSpec;
return this;
}

public SearchQueryProperties clone() {
return new SearchQueryProperties()
.setMaxResultsRequested(myMaxResultsRequested)
.setSortSpec(mySortSpec)
.setOffset(myOffset)
.setDoCountOnlyFlag(myDoCountOnlyFlag)
.setDeduplicateInDatabase(myDeduplicateInDB);
}
}
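A minimal usage sketch of the new carrier object, based only on the fluent setters and clone() defined above (the values are hypothetical):

import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;

class SearchQueryPropertiesUsageSketch {
    static void example() {
        SearchQueryProperties searchProperties = new SearchQueryProperties()
                .setOffset(null)                 // first page
                .setMaxResultsRequested(10000)   // null would mean "fetch everything"
                .setDoCountOnlyFlag(false)
                .setDeduplicateInDatabase(true); // GROUP BY in SQL, not a Set in the JVM

        // Count queries clone the properties and clear what a count doesn't need,
        // as SearchBuilder does earlier in this diff:
        SearchQueryProperties countProperties = searchProperties.clone()
                .setDoCountOnlyFlag(true)
                .setSortSpec(null)               // counts don't require sorts
                .setMaxResultsRequested(null)
                .setOffset(null);
    }
}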
@@ -522,7 +522,6 @@ public class SearchQueryBuilder {
* Generate and return the SQL generated by this builder
*/
public GeneratedSql generate(@Nullable Integer theOffset, @Nullable Integer theMaxResultsToFetch) {

getOrCreateFirstPredicateBuilder();

mySelect.validate();
@@ -593,6 +593,14 @@ public class SearchTask implements Callable<Void> {

if (next == -1) {
sb.setMaxResultsToFetch(null);
/*
* If we're past the last prefetch threshold then
* we're potentially fetching unlimited amounts of data.
* We'll move responsibility for deduplication to the database in this case
* so that we don't run the risk of blowing out the memory
* in the app server
*/
sb.setDeduplicateInDatabase(true);
} else {
// we want at least 1 more than our requested amount
// so we know that there are other results
@@ -9,6 +9,8 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
import ca.uhn.fhir.mdm.api.IMdmResourceDaoSvc;
import ca.uhn.fhir.mdm.util.MdmResourceUtil;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.StringOrListParam;
@@ -23,8 +25,10 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@@ -95,8 +99,9 @@ public class MdmResourceDaoSvcTest extends BaseMdmR4Test {
public void testSearchForMultiplePatientsByIdInPartitionedEnvironment() {
// setup
int resourceCount = 3;
// keep alphabetical
String[] idPrefaces = new String[] {
"RED", "BLUE", "GREEN"
"BLUE", "GREEN", "RED"
};

SearchParameterMap map;
@@ -120,11 +125,17 @@ public class MdmResourceDaoSvcTest extends BaseMdmR4Test {
patientIds.add(new StringParam("Patient/" +
patientOnPartition.getIdElement().getIdPart()
));
await().atLeast(100, TimeUnit.MILLISECONDS);
}

// test
map = SearchParameterMap.newSynchronous();
map.add("_id", patientIds);
// we'll use a sort to ensure consistent ordering of returned values
SortSpec sort = new SortSpec();
sort.setOrder(SortOrderEnum.ASC);
sort.setParamName("_id");
map.setSort(sort);
result = myPatientDao.search(map, new SystemRequestDetails());

// verify
@@ -132,6 +143,7 @@ public class MdmResourceDaoSvcTest extends BaseMdmR4Test {
assertFalse(result.isEmpty());
List<IBaseResource> resources = result.getAllResources();
assertThat(resources).hasSize(resourceCount);

int count = 0;
for (IBaseResource resource : resources) {
String id = idPrefaces[count++];
@@ -86,7 +86,6 @@ public class FhirResourceDaoDstu2SearchFtTest extends BaseJpaDstu2Test {
map = new SearchParameterMap();
map.add(Constants.PARAM_TEXT, new StringParam("DIVBBB"));
assertThat(toUnqualifiedVersionlessIds(myPatientDao.search(map))).containsExactly(pId1);

}

@Test
@@ -1,10 +1,5 @@
package ca.uhn.fhir.jpa.provider.dstu3;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.assertFalse;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
@@ -30,9 +25,7 @@ import ca.uhn.fhir.rest.gclient.StringClientParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
@@ -133,6 +126,7 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
@@ -161,7 +155,11 @@ import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

@@ -184,10 +182,8 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
mySearchCoordinatorSvcRaw.setLoadingThrottleForUnitTests(null);
mySearchCoordinatorSvcRaw.setSyncSizeForUnitTests(QueryParameterUtils.DEFAULT_SYNC_SIZE);
mySearchCoordinatorSvcRaw.setNeverUseLocalSearchForUnitTests(false);

}


@Test
public void testSearchBySourceTransactionId() {

@@ -1485,53 +1481,51 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
ourLog.info(ids.toString());
}

@Test
public void testEverythingInstanceWithContentFilter() {
Patient pt1 = new Patient();
pt1.addName().setFamily("Everything").addGiven("Arthur");
IIdType ptId1 = myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();

Patient pt2 = new Patient();
pt2.addName().setFamily("Everything").addGiven("Arthur");
IIdType ptId2 = myPatientDao.create(pt2, mySrd).getId().toUnqualifiedVersionless();
@Test
public void testEverythingInstanceWithContentFilter() {
Patient pt1 = new Patient();
pt1.addName().setFamily("Everything").addGiven("Arthur");
IIdType ptId1 = myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();

Device dev1 = new Device();
dev1.setManufacturer("Some Manufacturer");
IIdType devId1 = myDeviceDao.create(dev1, mySrd).getId().toUnqualifiedVersionless();
Patient pt2 = new Patient();
pt2.addName().setFamily("Everything").addGiven("Arthur");
IIdType ptId2 = myPatientDao.create(pt2, mySrd).getId().toUnqualifiedVersionless();

Device dev2 = new Device();
dev2.setManufacturer("Some Manufacturer 2");
myDeviceDao.create(dev2, mySrd).getId().toUnqualifiedVersionless();
Device dev1 = new Device();
dev1.setManufacturer("Some Manufacturer");
IIdType devId1 = myDeviceDao.create(dev1, mySrd).getId().toUnqualifiedVersionless();

Observation obs1 = new Observation();
obs1.getText().setDivAsString("<div>OBSTEXT1</div>");
obs1.getSubject().setReferenceElement(ptId1);
obs1.getCode().addCoding().setCode("CODE1");
obs1.setValue(new StringType("obsvalue1"));
obs1.getDevice().setReferenceElement(devId1);
IIdType obsId1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
Device dev2 = new Device();
dev2.setManufacturer("Some Manufacturer 2");
myDeviceDao.create(dev2, mySrd).getId().toUnqualifiedVersionless();

Observation obs2 = new Observation();
obs2.getSubject().setReferenceElement(ptId1);
obs2.getCode().addCoding().setCode("CODE2");
obs2.setValue(new StringType("obsvalue2"));
IIdType obsId2 = myObservationDao.create(obs2, mySrd).getId().toUnqualifiedVersionless();
// create an observation that links to Dev1 and Patient1
Observation obs1 = new Observation();
obs1.getText().setDivAsString("<div>OBSTEXT1</div>");
obs1.getSubject().setReferenceElement(ptId1);
obs1.getCode().addCoding().setCode("CODE1");
obs1.setValue(new StringType("obsvalue1"));
obs1.getDevice().setReferenceElement(devId1);
IIdType obsId1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();

Observation obs3 = new Observation();
obs3.getSubject().setReferenceElement(ptId2);
obs3.getCode().addCoding().setCode("CODE3");
obs3.setValue(new StringType("obsvalue3"));
IIdType obsId3 = myObservationDao.create(obs3, mySrd).getId().toUnqualifiedVersionless();
Observation obs2 = new Observation();
obs2.getSubject().setReferenceElement(ptId1);
obs2.getCode().addCoding().setCode("CODE2");
obs2.setValue(new StringType("obsvalue2"));
IIdType obsId2 = myObservationDao.create(obs2, mySrd).getId().toUnqualifiedVersionless();

List<IIdType> actual;
StringAndListParam param;
Observation obs3 = new Observation();
obs3.getSubject().setReferenceElement(ptId2);
obs3.getCode().addCoding().setCode("CODE3");
obs3.setValue(new StringType("obsvalue3"));
IIdType obsId3 = myObservationDao.create(obs3, mySrd).getId().toUnqualifiedVersionless();

ourLog.info("Pt1:{} Pt2:{} Obs1:{} Obs2:{} Obs3:{}", ptId1.getIdPart(), ptId2.getIdPart(), obsId1.getIdPart(), obsId2.getIdPart(), obsId3.getIdPart());
List<IIdType> actual;

param = new StringAndListParam();
param.addAnd(new StringOrListParam().addOr(new StringParam("obsvalue1")));
ourLog.info("Pt1:{} Pt2:{} Obs1:{} Obs2:{} Obs3:{}", ptId1.getIdPart(), ptId2.getIdPart(), obsId1.getIdPart(), obsId2.getIdPart(), obsId3.getIdPart());

//@formatter:off
//@formatter:off
Parameters response = myClient
.operation()
.onInstance(ptId1)
@@ -1540,10 +1534,9 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
.execute();
//@formatter:on

actual = toUnqualifiedVersionlessIds((Bundle) response.getParameter().get(0).getResource());
assertThat(actual).containsExactlyInAnyOrder(ptId1, obsId1, devId1);

}
actual = toUnqualifiedVersionlessIds((Bundle) response.getParameter().get(0).getResource());
assertThat(actual).containsExactlyInAnyOrder(ptId1, obsId1, devId1);
}

/**
* See #147
@@ -2627,35 +2620,40 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {

@Test
public void testEverythingWithNoPagingProvider() {
myRestServer.setPagingProvider(null);
IPagingProvider pagingProvider = myRestServer.getPagingProvider();
try {
myRestServer.setPagingProvider(null);

Patient p = new Patient();
p.setActive(true);
String pid = myPatientDao.create(p).getId().toUnqualifiedVersionless().getValue();
Patient p = new Patient();
p.setActive(true);
String pid = myPatientDao.create(p).getId().toUnqualifiedVersionless().getValue();

for (int i = 0; i < 20; i++) {
Observation o = new Observation();
o.getSubject().setReference(pid);
o.addIdentifier().setSystem("foo").setValue(Integer.toString(i));
myObservationDao.create(o);
for (int i = 0; i < 20; i++) {
Observation o = new Observation();
o.getSubject().setReference(pid);
o.addIdentifier().setSystem("foo").setValue(Integer.toString(i));
myObservationDao.create(o);
}

mySearchCoordinatorSvcRaw.setLoadingThrottleForUnitTests(50);
mySearchCoordinatorSvcRaw.setSyncSizeForUnitTests(10);
mySearchCoordinatorSvcRaw.setNeverUseLocalSearchForUnitTests(true);

Bundle response = myClient
.operation()
.onInstance(new IdType(pid))
.named("everything")
.withSearchParameter(Parameters.class, "_count", new NumberParam(10))
.returnResourceType(Bundle.class)
.useHttpGet()
.execute();

assertThat(response.getEntry()).hasSize(10);
assertNull(response.getTotalElement().getValue());
assertNull(response.getLink("next"));
} finally {
myRestServer.setPagingProvider(pagingProvider);
}

mySearchCoordinatorSvcRaw.setLoadingThrottleForUnitTests(50);
mySearchCoordinatorSvcRaw.setSyncSizeForUnitTests(10);
mySearchCoordinatorSvcRaw.setNeverUseLocalSearchForUnitTests(true);

Bundle response = myClient
.operation()
.onInstance(new IdType(pid))
.named("everything")
.withSearchParameter(Parameters.class, "_count", new NumberParam(10))
.returnResourceType(Bundle.class)
.useHttpGet()
.execute();

assertThat(response.getEntry()).hasSize(10);
assertNull(response.getTotalElement().getValue());
assertNull(response.getLink("next"));
}

@Test
@@ -4073,7 +4071,6 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
ourLog.info(StringUtils.join(names, '\n'));

assertThat(names).containsExactly("Daniel Adams", "Aaron Alexis", "Carol Allen", "Ruth Black", "Brian Brooks", "Amy Clark", "Susan Clark", "Anthony Coleman", "Lisa Coleman", "Steven Coleman", "Ruth Cook", "Betty Davis", "Joshua Diaz", "Brian Gracia", "Sarah Graham", "Stephan Graham");

}

/**
@@ -165,7 +165,7 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4Test {
assertThat(actual).containsExactlyInAnyOrder(id1.toUnqualifiedVersionless().getValue());

assertThat(myCaptureQueriesListener.getSelectQueries().stream().map(t -> t.getSql(true, false)).toList()).contains(
"SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 WHERE (t0.HASH_COMPLETE = '-2634469377090377342')"
"SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 WHERE (t0.HASH_COMPLETE = '-2634469377090377342') fetch first '10000' rows only"
);

logCapturedMessages();
@@ -291,7 +291,7 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4Test {
assertThat(actual).containsExactlyInAnyOrder(id1.toUnqualifiedVersionless().getValue());

String sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false);
String expected = "SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_IDX_CMB_TOK_NU t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_DATE t2 ON (t1.RES_ID = t2.RES_ID) WHERE ((t0.HASH_COMPLETE = '-2634469377090377342') AND ((t2.HASH_IDENTITY = '5247847184787287691') AND (((t2.SP_VALUE_LOW_DATE_ORDINAL >= '20210202') AND (t2.SP_VALUE_LOW_DATE_ORDINAL <= '20210202')) AND ((t2.SP_VALUE_HIGH_DATE_ORDINAL <= '20210202') AND (t2.SP_VALUE_HIGH_DATE_ORDINAL >= '20210202')))))";
String expected = "SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_IDX_CMB_TOK_NU t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_DATE t2 ON (t1.RES_ID = t2.RES_ID) WHERE ((t0.HASH_COMPLETE = '-2634469377090377342') AND ((t2.HASH_IDENTITY = '5247847184787287691') AND (((t2.SP_VALUE_LOW_DATE_ORDINAL >= '20210202') AND (t2.SP_VALUE_LOW_DATE_ORDINAL <= '20210202')) AND ((t2.SP_VALUE_HIGH_DATE_ORDINAL <= '20210202') AND (t2.SP_VALUE_HIGH_DATE_ORDINAL >= '20210202'))))) fetch first '10000' rows only";
assertEquals(expected, sql);

logCapturedMessages();
@@ -323,7 +323,7 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4Test {
assertThat(actual).containsExactlyInAnyOrder(id1.toUnqualifiedVersionless().getValue());

String sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false);
String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 INNER JOIN HFJ_SPIDX_STRING t1 ON (t0.RES_ID = t1.RES_ID) WHERE ((t0.HASH_COMPLETE = '7545664593829342272') AND ((t1.HASH_NORM_PREFIX = '6206712800146298788') AND (t1.SP_VALUE_NORMALIZED LIKE 'JAY%')))";
String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 INNER JOIN HFJ_SPIDX_STRING t1 ON (t0.RES_ID = t1.RES_ID) WHERE ((t0.HASH_COMPLETE = '7545664593829342272') AND ((t1.HASH_NORM_PREFIX = '6206712800146298788') AND (t1.SP_VALUE_NORMALIZED LIKE 'JAY%'))) fetch first '10000' rows only";
assertEquals(expected, sql);

logCapturedMessages();
@@ -363,7 +363,7 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4Test {
myCaptureQueriesListener.logSelectQueries();
assertThat(actual).contains(id1.toUnqualifiedVersionless().getValue());

String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 WHERE (t0.HASH_COMPLETE = '7196518367857292879')";
String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 WHERE (t0.HASH_COMPLETE = '7196518367857292879') fetch first '10000' rows only";
assertEquals(expected, myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false));

logCapturedMessages();
@@ -398,7 +398,7 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4Test {
myCaptureQueriesListener.logSelectQueries();
assertThat(actual).contains(id1.toUnqualifiedVersionless().getValue());

String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 WHERE (t0.HASH_COMPLETE = '2591238402961312979')";
String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 WHERE (t0.HASH_COMPLETE = '2591238402961312979') fetch first '10000' rows only";
assertEquals(expected, myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false));
}

@@ -460,9 +460,8 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4Test {
myCaptureQueriesListener.logSelectQueries();
assertThat(actual).contains("Patient/A");

String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 INNER JOIN HFJ_IDX_CMB_TOK_NU t1 ON (t0.RES_ID = t1.RES_ID) WHERE ((t0.HASH_COMPLETE = '822090206952728926') AND (t1.HASH_COMPLETE = '-8088946700286918311'))";
String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 INNER JOIN HFJ_IDX_CMB_TOK_NU t1 ON (t0.RES_ID = t1.RES_ID) WHERE ((t0.HASH_COMPLETE = '822090206952728926') AND (t1.HASH_COMPLETE = '-8088946700286918311')) fetch first '10000' rows only";
assertEquals(expected, myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false));

}

/**
@@ -496,7 +495,7 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4Test {
myCaptureQueriesListener.logSelectQueries();
assertThat(actual).contains("Patient/A");

String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 INNER JOIN HFJ_SPIDX_STRING t1 ON (t0.RES_ID = t1.RES_ID) WHERE ((t0.HASH_COMPLETE = '822090206952728926') AND ((t1.HASH_NORM_PREFIX = '-3664262414674370905') AND (t1.SP_VALUE_NORMALIZED LIKE 'JONES%')))";
String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 INNER JOIN HFJ_SPIDX_STRING t1 ON (t0.RES_ID = t1.RES_ID) WHERE ((t0.HASH_COMPLETE = '822090206952728926') AND ((t1.HASH_NORM_PREFIX = '-3664262414674370905') AND (t1.SP_VALUE_NORMALIZED LIKE 'JONES%'))) fetch first '10000' rows only";
assertEquals(expected, myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false));

}
@@ -519,7 +518,7 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4Test {
myCaptureQueriesListener.logSelectQueries();
assertThat(actual).contains("Observation/O1");

String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 WHERE (t0.HASH_COMPLETE IN ('2445648980345828396','-6884698528022589694','-8034948665712960724') )";
String expected = "SELECT t0.RES_ID FROM HFJ_IDX_CMB_TOK_NU t0 WHERE (t0.HASH_COMPLETE IN ('2445648980345828396','-6884698528022589694','-8034948665712960724') ) fetch first '10000' rows only";
assertEquals(expected, myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false));

logCapturedMessages();
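Every expected-SQL string in these tests gains the same suffix because synchronous searches now always carry a maximum fetch size. An illustrative helper showing how such a limit becomes a FETCH FIRST clause (hypothetical; the real work happens in SearchQueryBuilder.generate(theOffset, theMaxResultsToFetch) shown earlier in this diff):

class FetchLimitSketch {
    // Appends the row-limit clause seen in the expected SQL above.
    // A null limit means "fetch everything"; deduplication then falls to GROUP BY.
    static String applyFetchLimit(String baseSql, Integer maxResultsToFetch) {
        if (maxResultsToFetch == null) {
            return baseSql;
        }
        return baseSql + " fetch first '" + maxResultsToFetch + "' rows only";
    }
}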
@@ -516,7 +516,7 @@ public class FhirResourceDaoR4ComboUniqueParamTest extends BaseComboParamsR4Test {
myCaptureQueriesListener.logFirstSelectQueryForCurrentThread();
assertThat(toUnqualifiedVersionlessIdValues(outcome)).containsExactlyInAnyOrder(id1);
String unformattedSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_IDX_CMP_STRING_UNIQ t0 WHERE (t0.IDX_STRING IN ('Patient?identifier=urn%7C111','Patient?identifier=urn%7C222') )", unformattedSql);
assertEquals("SELECT t0.RES_ID FROM HFJ_IDX_CMP_STRING_UNIQ t0 WHERE (t0.IDX_STRING IN ('Patient?identifier=urn%7C111','Patient?identifier=urn%7C222') ) fetch first '10000' rows only", unformattedSql);

}

@@ -553,11 +553,10 @@ public class FhirResourceDaoR4ComboUniqueParamTest extends BaseComboParamsR4Test {
"'Patient?family=Family1&gender=http%3A%2F%2Fhl7.org%2Ffhir%2Fadministrative-gender%7Cfemale'," +
"'Patient?family=Family1&gender=http%3A%2F%2Fhl7.org%2Ffhir%2Fadministrative-gender%7Cmale'," +
"'Patient?family=Family2&gender=http%3A%2F%2Fhl7.org%2Ffhir%2Fadministrative-gender%7Cfemale'," +
"'Patient?family=Family2&gender=http%3A%2F%2Fhl7.org%2Ffhir%2Fadministrative-gender%7Cmale') )", unformattedSql);
"'Patient?family=Family2&gender=http%3A%2F%2Fhl7.org%2Ffhir%2Fadministrative-gender%7Cmale') )" +
" fetch first '10000' rows only", unformattedSql);
}


@Test
public void testDoubleMatchingOnAnd_Search_NonMatching() {
Pair<String, String> ids = prepareDoubleMatchingSearchParameterAndPatient();
@@ -1508,7 +1508,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test {
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0");
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("fetch next '6'");
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("fetch next '11'");
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("offset '5'");
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
@@ -28,7 +28,6 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap.EverythingModeEnum;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
@@ -15,6 +15,7 @@ import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.SummaryEnum;
@@ -33,6 +34,7 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BaseResource;
import org.hl7.fhir.r4.model.BodyStructure;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
@@ -65,14 +67,18 @@ import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.leftPad;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;


@@ -556,6 +562,67 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {

}

/**
* We want to use the db to deduplicate in the "fetch everything"
* case because it's more memory efficient.
*/
@Test
public void search_whenPastPreFetchLimit_usesDBToDeduplicate() {
// setup
IBundleProvider results;
List<SqlQuery> queries;
List<String> ids;

create200Patients();

myCaptureQueriesListener.clear();
// set the prefetch thresholds low so we don't need to
// search for tons of resources
myStorageSettings.setSearchPreFetchThresholds(List.of(5, 10, -1));

// basic search map
SearchParameterMap map = new SearchParameterMap();
map.setSort(new SortSpec(BaseResource.SP_RES_LAST_UPDATED));

// test
results = myPatientDao.search(map, null);
String uuid = results.getUuid();
ourLog.debug("** Search returned UUID: {}", uuid);
assertNotNull(results);
ids = toUnqualifiedVersionlessIdValues(results, 0, 9, true);
assertEquals(9, ids.size());

// first search was < 10 (our max pre-fetch value); so we should
// expect no "group by" queries (we deduplicate in memory)
queries = findGroupByQueries();
assertTrue(queries.isEmpty());
myCaptureQueriesListener.clear();

ids = toUnqualifiedVersionlessIdValues(results, 10, 100, true);
assertEquals(90, ids.size());

// we are now requesting > 10 results, meaning we should be using the
// database to deduplicate any values not fetched yet;
// so we *do* expect to see a "group by" query
queries = findGroupByQueries();
assertFalse(queries.isEmpty());
assertEquals(1, queries.size());
SqlQuery query = queries.get(0);
String sql = query.getSql(true, false);
// we expect a "GROUP BY t0.RES_ID" (but we'll be ambiguous about the table
// name, just in case)
Pattern p = Pattern.compile("GROUP BY .+\\.RES_ID");
Matcher m = p.matcher(sql);
assertTrue(m.find());
}

private List<SqlQuery> findGroupByQueries() {
List<SqlQuery> queries = myCaptureQueriesListener.getSelectQueries();
queries = queries.stream().filter(q -> q.getSql(true, false).toLowerCase().contains("group by"))
.collect(Collectors.toList());
return queries;
}

@Test
public void testFetchMoreThanFirstPageSizeInFirstPage() {
create200Patients();
@@ -786,14 +853,12 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {

}


/**
* A search with a big list of OR clauses for references should use a single SELECT ... WHERE .. IN
* and not a whole bunch of SQL ORs.
*/
@Test
public void testReferenceOrLinksUseInList() {

List<Long> ids = new ArrayList<>();
for (int i = 0; i < 5; i++) {
Organization org = new Organization();
@@ -806,7 +871,6 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
myPatientDao.create(pt).getId().getIdPartAsLong();
}


myCaptureQueriesListener.clear();
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_ORGANIZATION, new ReferenceOrListParam()
@ -828,7 +892,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
||||
|
||||
String resultingQueryNotFormatted = queries.get(0);
|
||||
assertThat(StringUtils.countMatches(resultingQueryNotFormatted, "Patient.managingOrganization")).as(resultingQueryNotFormatted).isEqualTo(1);
|
||||
assertThat(resultingQueryNotFormatted).matches("^SELECT .* WHERE .*TARGET_RESOURCE_ID IN \\(.*\\)$");
|
||||
assertThat(resultingQueryNotFormatted).matches("^SELECT .* WHERE .*TARGET_RESOURCE_ID IN \\(.*\\) .* fetch first '10000' rows only$");
|
||||
|
||||
// Ensure that the search actually worked
|
||||
assertEquals(5, search.size().intValue());
|
||||
|
@ -66,44 +66,44 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
new SqlGenerationTestCase(
"single string - no hfj_resource root",
"Patient?name=FOO",
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)))"
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"two regular params - should use hfj_resource as root",
"Patient?name=smith&active=true",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID = ?) AND (t2.HASH_VALUE = ?)))"
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?)) fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID = ?) AND (t2.HASH_VALUE = ?))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"token not as a NOT IN subselect",
"Encounter?class:not=not-there",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = ?) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = ?) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"token not on chain join - NOT IN from hfj_res_link target columns",
"Observation?encounter.class:not=not-there",
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))"
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"bare sort",
"Patient?_sort=name",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
)
, new SqlGenerationTestCase(
"sort with predicate",
"Patient?active=true&_sort=name",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST"
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
)
, new SqlGenerationTestCase(
"chained sort",
"Patient?_sort=Practitioner:general-practitioner.name",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
)
);
}
@ -155,7 +155,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
myPatientDao.search(map);
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only", sql);

}

@ -164,7 +164,6 @@
*/
@Test
public void testTwoRegularSearchParams() {

myCaptureQueriesListener.clear();
SearchParameterMap map = SearchParameterMap.newSynchronous()
.add(Patient.SP_NAME, new StringParam("FOO"))
@ -172,9 +171,7 @@
myPatientDao.search(map);
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_SYS_AND_VALUE = ?))", sql);

assertEquals("SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_SYS_AND_VALUE = ?)) fetch first ? rows only", sql);
}

@ParameterizedTest
@ -200,7 +197,7 @@
assertEquals(3, myCaptureQueriesListener.logSelectQueries().size());
// Query 1 - Find resources: Make sure we search for tag type+system+code always
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?)))", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?))) fetch first ? rows only", sql);
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql).contains("where rht1_0.RES_ID in (?)");
@ -242,9 +239,11 @@
.add(Constants.PARAM_PROFILE, new UriParam(code));
IBundleProvider outcome = myPatientDao.search(map, mySrd);
assertEquals(2, myCaptureQueriesListener.countSelectQueries());

// Query 1 - Find resources: Just a standard token search in this mode
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?)", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?) fetch first ? rows only", sql);

// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql).contains("where rht1_0.RES_ID in (?)");
@ -268,11 +267,10 @@

// Verify
if (theIncludeHashIdentity) {
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE ((t0.HASH_IDENTITY = '7001889285610424179') AND (t0.HASH_SYS_AND_VALUE = '-2780914544385068076'))", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE ((t0.HASH_IDENTITY = '7001889285610424179') AND (t0.HASH_SYS_AND_VALUE = '-2780914544385068076')) fetch first '10000' rows only", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
} else {
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_SYS_AND_VALUE = '-2780914544385068076')", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_SYS_AND_VALUE = '-2780914544385068076') fetch first '10000' rows only", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
}

}

public static class MyPartitionInterceptor {

@ -1459,10 +1459,8 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
IBundleProvider searchOutcome = myPatientDao.search(map, mySrd);
assertEquals(0, searchOutcome.size());
}

}

@Test
public void testSearch_MissingParamString_SearchAllPartitions() {
myPartitionSettings.setIncludePartitionInSearchHashes(false);
@ -1858,10 +1856,8 @@
// NOTE: the query is changed, only one SP_VALUE_LOW and SP_VALUE_HIGH
assertEquals(1, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
assertEquals(1, StringUtils.countMatches(searchSql, "SP_VALUE_HIGH"));

}

@Test
public void testSearch_DateParam_SearchSpecificPartitions() {
myPartitionSettings.setIncludePartitionInSearchHashes(false);

@ -81,7 +81,7 @@ public class ForceOffsetSearchModeInterceptorTest extends BaseResourceProviderR4
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0");
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("fetch next '6' rows only");
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("fetch next '11' rows only");
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("offset '5'");
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
@ -161,7 +161,7 @@ public class ForceOffsetSearchModeInterceptorTest extends BaseResourceProviderR4
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0");
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("fetch next '8' rows only");
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false)).contains("fetch next '15' rows only");
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

@ -273,7 +273,7 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
myCaptureQueriesListener.clear();
myObservationDao.search(SearchParameterMap.newSynchronous(), mySrd);
myCaptureQueriesListener.logSelectQueries();
assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'Observation') AND (t0.RES_DELETED_AT IS NULL))", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'Observation') AND (t0.RES_DELETED_AT IS NULL)) fetch first '10000' rows only", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
}

@Test

@ -782,9 +782,9 @@ abstract class TestDefinitions implements ITestDataBuilder {
} else {
assertThat(getSelectSql(0)).contains(" INNER JOIN HFJ_RES_LINK t0 ON (t1.RES_ID = t0.TARGET_RESOURCE_ID) ");
if (myIncludePartitionIdsInSql) {
assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.PARTITION_ID = '1') AND (t0.SRC_PATH = 'List.entry.item') AND (t0.TARGET_RESOURCE_TYPE = 'Patient') AND (t0.SRC_RESOURCE_ID = '" + listIdLong + "'))");
assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.PARTITION_ID = '1') AND (t0.SRC_PATH = 'List.entry.item') AND (t0.TARGET_RESOURCE_TYPE = 'Patient') AND (t0.SRC_RESOURCE_ID = '" + listIdLong + "')) fetch first '10000' rows only");
} else {
assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.SRC_PATH = 'List.entry.item') AND (t0.TARGET_RESOURCE_TYPE = 'Patient') AND (t0.SRC_RESOURCE_ID = '" + listIdLong + "'))");
assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.SRC_PATH = 'List.entry.item') AND (t0.TARGET_RESOURCE_TYPE = 'Patient') AND (t0.SRC_RESOURCE_ID = '" + listIdLong + "')) fetch first '10000' rows only");
}
}

@ -816,10 +816,10 @@ abstract class TestDefinitions implements ITestDataBuilder {
assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID IN ('1','2') )) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", getSelectSql(0));
assertThat(getSelectSql(1)).contains(" where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in (('" + id0.getIdPartAsLong() + "','1'),('" + id1.getIdPartAsLong() + "','2'),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL),('-1',NULL)) and mrt1_0.RES_VER=rht1_0.RES_VER");
} else if (myIncludePartitionIdsInSql) {
assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID IN ('1','2') )) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", getSelectSql(0));
assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID IN ('1','2') )) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first '10000' rows only", getSelectSql(0));
assertThat(getSelectSql(1)).contains(" where rht1_0.RES_ID in ('" + id0.getIdPartAsLong() + "','" + id1.getIdPartAsLong() + "','-1','-1','-1','-1','-1','-1','-1','-1') and mrt1_0.RES_VER=rht1_0.RES_VER");
} else {
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST", getSelectSql(0));
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = '-9208284524139093953')) WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first '10000' rows only", getSelectSql(0));
assertThat(getSelectSql(1)).contains(" where rht1_0.RES_ID in ('" + id0.getIdPartAsLong() + "','" + id1.getIdPartAsLong() + "','-1','-1','-1','-1','-1','-1','-1','-1') and mrt1_0.RES_VER=rht1_0.RES_VER");
}
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
@ -870,15 +870,15 @@ abstract class TestDefinitions implements ITestDataBuilder {
}
} else if (myIncludePartitionIdsInSql) {
if (theAccessMetaSourceInformationFromProvenanceTable) {
assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER_PROV t1 ON (t0.RES_ID = t1.RES_PID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0));
assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER_PROV t1 ON (t0.RES_ID = t1.RES_PID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo')) fetch first '10000' rows only", getSelectSql(0));
} else {
assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER t1 ON (t0.RES_ID = t1.RES_ID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0));
assertEquals("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER t1 ON (t0.RES_ID = t1.RES_ID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo')) fetch first '10000' rows only", getSelectSql(0));
}
} else {
if (theAccessMetaSourceInformationFromProvenanceTable) {
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER_PROV t1 ON (t0.RES_ID = t1.RES_PID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0));
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER_PROV t1 ON (t0.RES_ID = t1.RES_PID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo')) fetch first '10000' rows only", getSelectSql(0));
} else {
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER t1 ON (t0.RES_ID = t1.RES_ID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo'))", getSelectSql(0));
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_VER t1 ON (t0.RES_ID = t1.RES_ID) WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t1.SOURCE_URI = 'http://foo')) fetch first '10000' rows only", getSelectSql(0));
}
}

@ -1003,9 +1003,9 @@ abstract class TestDefinitions implements ITestDataBuilder {
// Verify
myCaptureQueriesListener.logSelectQueries();
if (myIncludePartitionIdsInSql) {
assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.PARTITION_ID = '1') AND (t0.HASH_VALUE = '7943378963388545453'))");
assertThat(getSelectSql(0)).endsWith(" WHERE ((t0.PARTITION_ID = '1') AND (t0.HASH_VALUE = '7943378963388545453')) fetch first '10000' rows only");
} else {
assertThat(getSelectSql(0)).endsWith(" WHERE (t0.HASH_VALUE = '7943378963388545453')");
assertThat(getSelectSql(0)).endsWith(" WHERE (t0.HASH_VALUE = '7943378963388545453') fetch first '10000' rows only");
}
if (myIncludePartitionIdsInPks) {
assertThat(getSelectSql(1)).endsWith(" where (rht1_0.RES_ID,rht1_0.PARTITION_ID) in (('" + id + "','1')) and mrt1_0.RES_VER=rht1_0.RES_VER");
@ -1066,9 +1066,9 @@ abstract class TestDefinitions implements ITestDataBuilder {

sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false);
if (myIncludePartitionIdsInSql) {
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1'))");
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1')) fetch first '10000' rows only");
} else {
assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL))");
assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) fetch first '10000' rows only");
}

sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false);
@ -1130,9 +1130,9 @@ abstract class TestDefinitions implements ITestDataBuilder {

sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false);
if (myIncludePartitionIdsInSql) {
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'QuestionnaireResponse') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1'))");
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'QuestionnaireResponse') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1')) fetch first '10000' rows only");
} else {
assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'QuestionnaireResponse') AND (t0.RES_DELETED_AT IS NULL))");
assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'QuestionnaireResponse') AND (t0.RES_DELETED_AT IS NULL)) fetch first '10000' rows only");
}

sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false);
@ -1177,9 +1177,9 @@ abstract class TestDefinitions implements ITestDataBuilder {

sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false);
if (myIncludePartitionIdsInSql) {
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1'))");
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = '1')) fetch first '10000' rows only");
} else {
assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL))");
assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE ((t0.RES_TYPE = 'Patient') AND (t0.RES_DELETED_AT IS NULL)) fetch first '10000' rows only");
}

sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false);
@ -1333,11 +1333,11 @@ abstract class TestDefinitions implements ITestDataBuilder {

sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false);
if (myIncludePartitionIdsInSql && myPartitionSettings.getDefaultPartitionId() == null) {
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IS NULL) AND (t0.RES_ID = '" + ids.parentOrgPid() + "')))");
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IS NULL) AND (t0.RES_ID = '" + ids.parentOrgPid() + "'))) fetch first '10000' rows only");
} else if (myIncludePartitionIdsInSql) {
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = '0') AND (t0.RES_ID = '" + ids.parentOrgPid() + "')))");
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = '0') AND (t0.RES_ID = '" + ids.parentOrgPid() + "'))) fetch first '10000' rows only");
} else {
assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.RES_ID = '" + ids.parentOrgPid() + "'))");
assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.RES_ID = '" + ids.parentOrgPid() + "')) fetch first '10000' rows only");
}

sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false);
@ -1400,11 +1400,11 @@ abstract class TestDefinitions implements ITestDataBuilder {

sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false);
if (myIncludePartitionIdsInSql && myPartitionSettings.getDefaultPartitionId() == null) {
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IS NULL) AND (t0.RES_ID = '" + ids.parentOrgPid() + "')))");
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IS NULL) AND (t0.RES_ID = '" + ids.parentOrgPid() + "'))) fetch first '10000' rows only");
} else if (myIncludePartitionIdsInSql) {
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = '0') AND (t0.RES_ID = '" + ids.parentOrgPid() + "')))");
assertThat(sql).isEqualTo("SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = '0') AND (t0.RES_ID = '" + ids.parentOrgPid() + "'))) fetch first '10000' rows only");
} else {
assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.RES_ID = '" + ids.parentOrgPid() + "'))");
assertThat(sql).isEqualTo("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = 'Organization') AND (t0.RES_DELETED_AT IS NULL)) AND (t0.RES_ID = '" + ids.parentOrgPid() + "')) fetch first '10000' rows only");
}

sql = myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false);
@ -1762,8 +1762,8 @@ abstract class TestDefinitions implements ITestDataBuilder {
RequestPartitionId.allPartitions(),
"single string - no hfj_resource root - all partitions",
"Patient?name=FOO",
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))",
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))"
);
SearchMultiPartitionTestCase.add(
@ -1771,8 +1771,8 @@ abstract class TestDefinitions implements ITestDataBuilder {
RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2),
"single string - no hfj_resource root - multiple partitions",
"Patient?name=FOO",
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)))",
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)))"
);

@ -1781,8 +1781,8 @@ abstract class TestDefinitions implements ITestDataBuilder {
RequestPartitionId.allPartitions(),
"two regular params - should use hfj_resource as root - all partitions",
"Patient?name=smith&active=true",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?)) fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?)) fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))"
);
SearchMultiPartitionTestCase.add(
@ -1790,8 +1790,8 @@ abstract class TestDefinitions implements ITestDataBuilder {
RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2),
"two regular params - should use hfj_resource as root - multiple partitions",
"Patient?name=smith&active=true",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID IN (?,?) ) AND (t2.HASH_VALUE = ?)))",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?)) fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID IN (?,?) ) AND (t2.HASH_VALUE = ?))) fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID IN (?,?) ) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID IN (?,?) ) AND (t2.HASH_VALUE = ?)))"
);

@ -1800,8 +1800,8 @@ abstract class TestDefinitions implements ITestDataBuilder {
RequestPartitionId.allPartitions(),
"token not as a NOT IN subselect - all partitions",
"Encounter?class:not=not-there",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))"
);
SearchMultiPartitionTestCase.add(
@ -1809,8 +1809,8 @@ abstract class TestDefinitions implements ITestDataBuilder {
RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2),
"token not as a NOT IN subselect - multiple partitions",
"Encounter?class:not=not-there",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))"
);

@ -1819,8 +1819,8 @@ abstract class TestDefinitions implements ITestDataBuilder {
RequestPartitionId.allPartitions(),
"token not on chain join - NOT IN from hfj_res_link target columns - all partitions",
"Observation?encounter.class:not=not-there",
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))"
);
SearchMultiPartitionTestCase.add(
@ -1828,8 +1828,8 @@ abstract class TestDefinitions implements ITestDataBuilder {
RequestPartitionId.fromPartitionIds(PARTITION_1, PARTITION_2),
"token not on chain join - NOT IN from hfj_res_link target columns - multiple partitions",
"Observation?encounter.class:not=not-there",
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))",
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID IN (?,?) ) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))"
);

@ -1848,22 +1848,22 @@ abstract class TestDefinitions implements ITestDataBuilder {
new SqlGenerationTestCase(
"bare sort",
"Patient?_sort=name",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST"
)
, new SqlGenerationTestCase(
"sort with predicate",
"Patient?active=true&_sort=name",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST"
)
, new SqlGenerationTestCase(
"chained sort",
"Patient?_sort=Practitioner:general-practitioner.name",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST"
)
);

@ -188,8 +188,9 @@ public class JpaStorageSettings extends StorageSettings {

// start with a tiny number so our first page always loads quickly.
// If they fetch the second page, fetch more.
// Use prime sizes to avoid empty next links.
private List<Integer> mySearchPreFetchThresholds = Arrays.asList(13, 503, 2003, -1);
// we'll only fetch (by default) up to 1 million records, because after that, deduplication in local memory is
// prohibitively expensive
private List<Integer> mySearchPreFetchThresholds = Arrays.asList(13, 503, 2003, 1000003, -1);
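
For reference, these thresholds are also tunable per deployment. A minimal configuration sketch follows (the values are illustrative, not a recommendation, and the standalone class is hypothetical):

import java.util.Arrays;

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

public class PrefetchThresholdConfigSketch {
	public static JpaStorageSettings storageSettings() {
		JpaStorageSettings settings = new JpaStorageSettings();
		// Page 1 pre-fetches up to 13 pids; later pages widen to 503, 2003,
		// then ~1M; -1 means "fetch everything that remains".
		settings.setSearchPreFetchThresholds(Arrays.asList(13, 503, 2003, 1000003, -1));
		return settings;
	}
}
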
private List<WarmCacheEntry> myWarmCacheEntries = new ArrayList<>();
private boolean myEnforceReferenceTargetTypes = true;
private ClientIdStrategyEnum myResourceClientIdStrategy = ClientIdStrategyEnum.ALPHANUMERIC;

@ -77,6 +77,15 @@ public interface ISearchBuilder<T extends IResourcePersistentId<?>> {

void setMaxResultsToFetch(Integer theMaxResultsToFetch);

void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB);

void setRequireTotal(boolean theRequireTotal);

/**
* True if the results should have a 'total' value
*/
boolean requiresTotal();

void loadResourcesByPid(
Collection<T> thePids,
Collection<T> theIncludedPids,
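
Taken together, the new ISearchBuilder methods above might be driven from a caller like this (a hedged sketch: the helper and its values are hypothetical, and the import locations are assumed to be the usual HAPI JPA packages rather than something this commit defines):

import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.ISearchBuilder; // assumed package

public class SearchBuilderTuningSketch {
	// Hypothetical helper: cap what one pass pulls from the DB and push
	// deduplication into SQL instead of a HashSet in the JVM.
	static void tuneForLargeSearch(ISearchBuilder<JpaPid> theBuilder, boolean theClientWantsTotal) {
		theBuilder.setMaxResultsToFetch(10_000); // illustrative cap
		theBuilder.setDeduplicateInDatabase(true); // GROUP BY in the pid query
		theBuilder.setRequireTotal(theClientWantsTotal); // only compute a total when asked
	}
}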

@ -11,7 +11,11 @@ import ca.uhn.fhir.rest.annotation.IncludeParam;
import ca.uhn.fhir.rest.annotation.OptionalParam;
import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.TokenAndListParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.IResourceProvider;
@ -66,7 +70,7 @@ public class OverlayTestApp {
FhirContext ctx = FhirContext.forR4Cached();
RestfulServer restfulServer = new RestfulServer(ctx);
restfulServer.registerProvider(new ProviderWithRequiredAndOptional());
restfulServer.registerProvider(new HashMapResourceProvider<>(ctx, Patient.class));
restfulServer.registerProvider(new PatientTestResourceProvider(ctx));
restfulServer.registerProvider(new HfqlRestProvider(hfqlExecutor));

ServletContextHandler proxyHandler = new ServletContextHandler();
@ -222,4 +226,31 @@ public class OverlayTestApp {

}

public static class PatientTestResourceProvider extends HashMapResourceProvider<Patient> {

/**
* Constructor
*
* @param theFhirContext The FHIR context
*/
public PatientTestResourceProvider(FhirContext theFhirContext) {
super(theFhirContext, Patient.class);
}

@Description(shortDefinition = "This is a provider endpoint with parameters for searching on patients to display")
@Search
public IBundleProvider findPatients(@RequiredParam(name = Patient.SP_ACTIVE) TokenAndListParam theType,
@Description(shortDefinition = "A portion of the given name of the patient")
@OptionalParam(name = "given")
StringAndListParam theGiven,
@Description(shortDefinition = "A portion of the family name of the patient")
@OptionalParam(name = "family")
StringAndListParam theFamily,
RequestDetails theRequestDetails
) throws Exception {
return searchAll(theRequestDetails);
}

}

}

@ -288,7 +288,6 @@ public class WebTest {
assertThat(diffPage.asNormalizedText()).contains("\"resourceType\": \"Parameters\"");
}

@Test
public void testHfqlExecuteQuery() throws IOException {
// Load home page
@ -317,7 +316,6 @@
assertThat(table.asNormalizedText()).contains("Simpson");
}

private void registerAndUpdatePatient() {
Patient p = new Patient();
Patient p2 = new Patient();
@ -380,7 +378,6 @@
Parameters parameters = new Parameters();
return parameters;
}

}

private static class MyServletContextHandler extends ServletContextHandler {