Compare commits

...

3 Commits

Author SHA1 Message Date
leif stawnyczy d0419b731a spotless 2024-11-27 14:41:34 -05:00
leif stawnyczy f7ffe9d62b limiting synchronous queries 2024-11-27 14:40:33 -05:00
leif stawnyczy 99868e1528 refactor 2024-11-27 09:11:00 -05:00
9 changed files with 238 additions and 90 deletions

View File

@ -66,6 +66,7 @@ import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.BeanFactory;
@ -377,6 +378,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
final ISearchBuilder<JpaPid> sb =
mySearchBuilderFactory.newSearchBuilder(theCallingDao, theResourceType, resourceTypeClass);
sb.setFetchSize(mySyncSize);
sb.setRequireTotal(theParams.getCount() != null);
final Integer loadSynchronousUpTo = getLoadSynchronousUpToOrNull(theCacheControlDirective);
boolean isOffsetQuery = theParams.isOffsetQuery();
@ -394,7 +396,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
try {
return direct.get();
} catch (ResourceNotFoundInIndexException theE) {
// some resources were not found in the index, so we log a warning and fall back to a JPA search
ourLog.warn(
@ -402,6 +403,14 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
}
}
// we set a max to fetch from the db for synchronous searches;
// otherwise, we would have to load everything into memory (or force the db to do so),
// so we set a max value here
Integer maxToLoad = ObjectUtils.defaultIfNull(
loadSynchronousUpTo, myStorageSettings.getInternalSynchronousSearchSize());
ourLog.debug("Setting a max fetch value of {} for synchronous search", maxToLoad);
sb.setMaxResultsToFetch(maxToLoad);
ourLog.debug("Search {} is loading in synchronous mode", searchUuid);
return mySynchronousSearchSvc.executeQuery(
theParams, theRequestDetails, searchUuid, sb, loadSynchronousUpTo, theRequestPartitionId);
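The hunk above caps how many rows a synchronous search will pull from the database. A minimal sketch of the fallback logic (not the HAPI code path itself; the 10000 default for getInternalSynchronousSearchSize() is an assumption, consistent with the "fetch first '10000' rows only" test expectation further down): ObjectUtils.defaultIfNull() prefers the cache-control-supplied limit and otherwise falls back to the configured synchronous search size.

import org.apache.commons.lang3.ObjectUtils;

class SyncFetchCapSketch {
    // resolve the cap: prefer the Cache-Control-supplied limit, else the server default
    static Integer resolveMaxToLoad(Integer loadSynchronousUpTo, Integer internalSynchronousSearchSize) {
        return ObjectUtils.defaultIfNull(loadSynchronousUpTo, internalSynchronousSearchSize);
    }

    public static void main(String[] args) {
        System.out.println(resolveMaxToLoad(null, 10000)); // 10000 - the storage-settings default applies
        System.out.println(resolveMaxToLoad(500, 10000));  // 500   - the cache-control directive wins
    }
}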

View File

@ -246,7 +246,7 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
resources, theRequestDetails, myInterceptorBroadcaster);
SimpleBundleProvider bundleProvider = new SimpleBundleProvider(resources);
if (hasACount) {
if (hasACount && theSb.requiresTotal()) {
bundleProvider.setTotalResourcesRequestedReturned(receivedResourceCount);
}
if (theParams.isOffsetQuery()) {
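Taken together with the coordinator change above (sb.setRequireTotal(theParams.getCount() != null)), the total is now reported only when the client actually supplied a _count. A minimal sketch of how the two flags interact, using placeholder types rather than the real HAPI classes:

class RequireTotalSketch {
    private boolean myRequiresTotal;

    // called by the search coordinator: a total is only required when _count was supplied
    void setRequireTotal(boolean theRequireTotal) {
        myRequiresTotal = theRequireTotal;
    }

    // called by the synchronous search service: null means "do not report a total"
    Integer totalToReport(int theReceivedResourceCount) {
        return myRequiresTotal ? theReceivedResourceCount : null;
    }

    public static void main(String[] args) {
        RequireTotalSketch sketch = new RequireTotalSketch();
        sketch.setRequireTotal(true); // stands in for theParams.getCount() != null
        System.out.println(sketch.totalToReport(25)); // 25; prints null if no _count was given
    }
}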

View File

@ -60,6 +60,7 @@ import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
@ -201,7 +202,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private SearchParameterMap myParams;
private String mySearchUuid;
private int myFetchSize;
private Integer myMaxResultsToFetch;
private boolean myRequiresTotal;
/**
* Set of PIDs of results that have already been returned in a search.
@ -227,6 +229,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private boolean myHasNextIteratorQuery = false;
private RequestPartitionId myRequestPartitionId;
private SearchQueryProperties mySearchProperties;
@Autowired(required = false)
private IFulltextSearchSvc myFulltextSearchSvc;
@ -272,6 +276,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myResourceSearchViewDao = theResourceSearchViewDao;
myContext = theContext;
myIdHelperService = theIdHelperService;
mySearchProperties = new SearchQueryProperties();
}
@VisibleForTesting
@ -281,7 +287,21 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
@Override
public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
myMaxResultsToFetch = theMaxResultsToFetch;
mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch);
}
public void setShouldDeduplicateInDB(boolean theShouldDeduplicateInDB) {
mySearchProperties.setDeduplicateInDBFlag(theShouldDeduplicateInDB);
}
@Override
public void setRequireTotal(boolean theRequireTotal) {
myRequiresTotal = theRequireTotal;
}
@Override
public boolean requiresTotal() {
return myRequiresTotal;
}
private void searchForIdsWithAndOr(
@ -290,6 +310,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
@Nonnull SearchParameterMap theParams,
RequestDetails theRequest) {
myParams = theParams;
mySearchProperties.setSortSpec(myParams.getSort());
// Remove any empty parameters
theParams.clean();
@ -360,7 +381,12 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
return myFulltextSearchSvc.count(myResourceName, theParams.clone());
}
List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), null, null, null, true, theRequest, null);
SearchQueryProperties properties = mySearchProperties.clone();
properties.setDoCountOnlyFlag(true);
properties.setSortSpec(null); // counts don't require sorts
properties.setMaxResultsRequested(null);
properties.setOffset(null);
List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null);
if (queries.isEmpty()) {
return 0L;
} else {
@ -399,19 +425,24 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myCriteriaBuilder = myEntityManager.getCriteriaBuilder();
// we mutate the params. Make a private copy.
myParams = theParams.clone();
mySearchProperties.setSortSpec(myParams.getSort());
mySearchUuid = theSearchUuid;
myRequestPartitionId = theRequestPartitionId;
}
/**
* The query created can be either a count query or the
* actual search query.
* This is why it takes a SearchQueryProperties parameter
* (rather than using the builder's own copy): the
* properties may differ slightly depending on which
* query is being built.
*/
private List<ISearchQueryExecutor> createQuery(
SearchParameterMap theParams,
SortSpec sort,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchProperties,
RequestDetails theRequest,
SearchRuntimeDetails theSearchRuntimeDetails) {
ArrayList<ISearchQueryExecutor> queries = new ArrayList<>();
if (checkUseHibernateSearch()) {
@ -422,7 +453,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
List<JpaPid> fulltextMatchIds = null;
int resultCount = 0;
if (myParams.isLastN()) {
fulltextMatchIds = executeLastNAgainstIndex(theMaximumResults);
fulltextMatchIds = executeLastNAgainstIndex(theSearchProperties.getMaxResultsRequested());
resultCount = fulltextMatchIds.size();
} else if (myParams.getEverythingMode() != null) {
fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
@ -479,8 +510,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (canSkipDatabase) {
ourLog.trace("Query finished after HSearch. Skip db query phase");
if (theMaximumResults != null) {
fulltextExecutor = SearchQueryExecutors.limited(fulltextExecutor, theMaximumResults);
if (theSearchProperties.hasMaxResultsRequested()) {
fulltextExecutor = SearchQueryExecutors.limited(
fulltextExecutor, theSearchProperties.getMaxResultsRequested());
}
queries.add(fulltextExecutor);
} else {
@ -493,13 +525,11 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
SearchBuilder.getMaximumPageSize(),
// for each list of (SearchBuilder.getMaximumPageSize())
// we create a chunked query and add it to 'queries'
t -> doCreateChunkedQueries(
theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries));
t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries));
}
} else {
// do everything in the database.
createChunkedQuery(
theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null, queries);
createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries);
}
return queries;
@ -594,16 +624,16 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private void doCreateChunkedQueries(
SearchParameterMap theParams,
List<Long> thePids,
Integer theOffset,
SortSpec sort,
boolean theCount,
SearchQueryProperties theSearchQueryProperties,
RequestDetails theRequest,
ArrayList<ISearchQueryExecutor> theQueries) {
if (thePids.size() < getMaximumPageSize()) {
thePids = normalizeIdListForInClause(thePids);
}
createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids, theQueries);
// TODO - thePids.size() used to be passed as the 4th parameter; what is it supposed to be in createChunkedQuery?
theSearchQueryProperties.setMaxResultsRequested(theParams.size());
createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries);
}
/**
@ -653,27 +683,21 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private void createChunkedQuery(
SearchParameterMap theParams,
SortSpec sort,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchProperties,
RequestDetails theRequest,
List<Long> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
if (myParams.getEverythingMode() != null) {
createChunkedQueryForEverythingSearch(
theParams, theOffset, theMaximumResults, theCountOnlyFlag, thePidList, theSearchQueryExecutors);
createChunkedQueryForEverythingSearch(theParams, theSearchProperties, thePidList, theSearchQueryExecutors);
} else {
createChunkedQueryNormalSearch(
theParams, sort, theOffset, theCountOnlyFlag, theRequest, thePidList, theSearchQueryExecutors);
theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors);
}
}
private void createChunkedQueryNormalSearch(
SearchParameterMap theParams,
SortSpec sort,
Integer theOffset,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchProperties,
RequestDetails theRequest,
List<Long> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
@ -685,7 +709,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myResourceName,
mySqlBuilderFactory,
myDialectProvider,
theCountOnlyFlag);
theSearchProperties.isDoCountOnlyFlag());
QueryStack queryStack3 = new QueryStack(
theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
@ -762,7 +786,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* if the MaxResultsToFetch is null, we are requesting "everything",
* so we'll let the db do the deduplication (instead of in-memory)
*/
if (theOffset != null || (myMaxResultsToFetch == null && !theCountOnlyFlag)) {
if (theSearchProperties.isDeduplicateInDBFlag()) {
queryStack3.addGrouping();
queryStack3.setUseAggregate(true);
}
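The old inline condition here (offset paging, or an unbounded non-count fetch) is what the new isDeduplicateInDBFlag() externalizes; SearchTask now sets the flag explicitly when it clears the fetch limit. A small sketch of that decision, assuming the flag is meant to reproduce the previous behaviour:

class DeduplicationDecisionSketch {
    // mirrors the condition that used to live inline in createChunkedQueryNormalSearch()
    static boolean deduplicateInDatabase(Integer theOffset, Integer theMaxResultsToFetch, boolean theCountOnly) {
        // offset-paged queries and unbounded (non-count) fetches push dedup to the DB via GROUP BY;
        // bounded fetches dedupe in memory against the PID set instead
        return theOffset != null || (theMaxResultsToFetch == null && !theCountOnly);
    }

    public static void main(String[] args) {
        System.out.println(deduplicateInDatabase(null, null, false)); // true  - unbounded fetch
        System.out.println(deduplicateInDatabase(50, 100, false));    // true  - offset paging
        System.out.println(deduplicateInDatabase(null, 100, false));  // false - bounded, dedupe in memory
    }
}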
@ -773,33 +797,34 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* If we have a sort, we wrap the criteria search (the search that actually
* finds the appropriate resources) in an outer search which is then sorted
*/
if (sort != null) {
assert !theCountOnlyFlag;
if (theSearchProperties.hasSort()) {
assert !theSearchProperties.isDoCountOnlyFlag();
createSort(queryStack3, sort, theParams);
createSort(queryStack3, theSearchProperties.getSortSpec(), theParams);
}
/*
* Now perform the search
*/
executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder);
}
private void executeSearch(
Integer theOffset, List<ISearchQueryExecutor> theSearchQueryExecutors, SearchQueryBuilder sqlBuilder) {
GeneratedSql generatedSql = sqlBuilder.generate(theOffset, myMaxResultsToFetch);
SearchQueryProperties theProperties,
List<ISearchQueryExecutor> theSearchQueryExecutors,
SearchQueryBuilder sqlBuilder) {
GeneratedSql generatedSql =
sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested());
if (!generatedSql.isMatchNothing()) {
SearchQueryExecutor executor =
mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, myMaxResultsToFetch);
mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested());
theSearchQueryExecutors.add(executor);
}
}
private void createChunkedQueryForEverythingSearch(
SearchParameterMap theParams,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchQueryProperties,
List<Long> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
@ -811,12 +836,12 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
null,
mySqlBuilderFactory,
myDialectProvider,
theCountOnlyFlag);
theSearchQueryProperties.isDoCountOnlyFlag());
QueryStack queryStack3 = new QueryStack(
theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theMaximumResults);
JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested());
Set<Long> targetPids = new HashSet<>();
if (myParams.get(IAnyResource.SP_RES_ID) != null) {
@ -839,8 +864,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myResourceName,
mySqlBuilderFactory,
myDialectProvider,
theCountOnlyFlag);
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch);
theSearchQueryProperties.isDoCountOnlyFlag());
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(
theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested());
String sql = allTargetsSql.getSql();
Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);
@ -874,7 +900,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* If offset is present, we want to deduplicate the results by using GROUP BY
* ORDER BY is required to make sure we return unique results for each page
*/
if (theOffset != null) {
if (theSearchQueryProperties.hasOffset()) {
queryStack3.addGrouping();
queryStack3.addOrdering();
queryStack3.setUseAggregate(true);
@ -883,7 +909,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
/*
* Now perform the search
*/
executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder);
}
private void addPidListPredicate(List<Long> thePidList, SearchQueryBuilder theSqlBuilder) {
@ -2412,15 +2438,15 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
// If we don't have a query yet, create one
if (myResultsIterator == null) {
if (myMaxResultsToFetch == null) {
myMaxResultsToFetch = calculateMaxResultsToFetch();
if (!mySearchProperties.hasMaxResultsRequested()) {
mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch());
}
/*
* assigns the results iterator
* and populates the myQueryList.
*/
initializeIteratorQuery(myOffset, myMaxResultsToFetch);
initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
}
if (myNext == null) {
@ -2454,7 +2480,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
JpaPid next = JpaPid.fromId(nextLong);
if (!myPidSet.contains(next)) {
if (myMaxResultsToFetch != null) {
if (mySearchProperties.hasMaxResultsRequested()) {
/*
* We only add to the map if we aren't fetching "everything";
* otherwise, we let the de-duplication happen in the database
@ -2474,13 +2500,13 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
if (!myResultsIterator.hasNext()) {
if (myMaxResultsToFetch != null && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) {
if (mySearchProperties.hasMaxResultsRequested()
&& (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) {
if (mySkipCount > 0 && myNonSkipCount == 0) {
sendProcessingMsgAndFirePerformanceHook();
myMaxResultsToFetch += 1000;
initializeIteratorQuery(myOffset, myMaxResultsToFetch);
int maxResults = mySearchProperties.getMaxResultsRequested() + 1000;
mySearchProperties.setMaxResultsRequested(maxResults);
initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
}
}
}
@ -2510,7 +2536,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
}
if (myMaxResultsToFetch == null) {
if (!mySearchProperties.hasMaxResultsRequested()) {
mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount);
} else {
mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size());
@ -2571,7 +2597,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
String msg = "Pass completed with no matching results seeking rows "
+ myPidSet.size() + "-" + mySkipCount
+ ". This indicates an inefficient query! Retrying with new max count of "
+ myMaxResultsToFetch;
+ mySearchProperties.getMaxResultsRequested();
firePerformanceWarning(myRequest, msg);
}
@ -2586,8 +2612,13 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (myParams.getEverythingMode() != null) {
offset = 0;
}
myQueryList = createQuery(
myParams, mySort, offset, theMaxResultsToFetch, false, myRequest, mySearchRuntimeDetails);
SearchQueryProperties properties = mySearchProperties.clone();
properties
.setOffset(offset)
.setMaxResultsRequested(theMaxResultsToFetch)
.setDoCountOnlyFlag(false);
myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails);
}
mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());

View File

@ -0,0 +1,104 @@
package ca.uhn.fhir.jpa.search.builder.models;
import ca.uhn.fhir.rest.api.SortSpec;
public class SearchQueryProperties {
/**
* True if this query is only to fetch the count (and not any results).
*
* True means this is a count-only query
*/
private boolean myDoCountOnlyFlag;
/**
* Whether or not we do deduplication of results in memory
* (using a hashset, etc), or push this to the database
* (using GROUP BY, etc).
*
* True means use the database
*/
private boolean myDeduplicateInDBFlag;
/**
* The maximum number of results to fetch (when we want it limited).
* Can be null if we are fetching everything or paging.
*/
private Integer myMaxResultsRequested;
/**
* The offset for the results to fetch.
*
* null for the first page, non-null for a later page
*/
private Integer myOffset;
/**
* The sort spec for this search
*/
private SortSpec mySortSpec;
public boolean isDoCountOnlyFlag() {
return myDoCountOnlyFlag;
}
public SearchQueryProperties setDoCountOnlyFlag(boolean theDoCountOnlyFlag) {
myDoCountOnlyFlag = theDoCountOnlyFlag;
return this;
}
public boolean isDeduplicateInDBFlag() {
return myDeduplicateInDBFlag;
}
public SearchQueryProperties setDeduplicateInDBFlag(boolean theDeduplicateInDBFlag) {
myDeduplicateInDBFlag = theDeduplicateInDBFlag;
return this;
}
public Integer getMaxResultsRequested() {
return myMaxResultsRequested;
}
public SearchQueryProperties setMaxResultsRequested(Integer theMaxResultsRequested) {
myMaxResultsRequested = theMaxResultsRequested;
return this;
}
public boolean hasMaxResultsRequested() {
return myMaxResultsRequested != null;
}
public Integer getOffset() {
return myOffset;
}
public boolean hasOffset() {
return myOffset != null;
}
public SearchQueryProperties setOffset(Integer theOffset) {
myOffset = theOffset;
return this;
}
public SortSpec getSortSpec() {
return mySortSpec;
}
public boolean hasSort() {
return mySortSpec != null;
}
public SearchQueryProperties setSortSpec(SortSpec theSortSpec) {
mySortSpec = theSortSpec;
return this;
}
public SearchQueryProperties clone() {
return new SearchQueryProperties()
.setMaxResultsRequested(myMaxResultsRequested)
.setSortSpec(mySortSpec)
.setOffset(myOffset)
.setDoCountOnlyFlag(myDoCountOnlyFlag)
.setDeduplicateInDBFlag(myDeduplicateInDBFlag);
}
}
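Based on the two call sites in this change (the count query in the count path and the page query in initializeIteratorQuery()), the intended usage pattern appears to be clone-and-override: keep a baseline SearchQueryProperties on the builder and derive a per-query copy. A sketch of that pattern; the sort, offset, and cap values below are illustrative placeholders only.

import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;
import ca.uhn.fhir.rest.api.SortSpec;

class SearchQueryPropertiesUsageSketch {
    public static void main(String[] args) {
        // baseline kept on the SearchBuilder (stand-ins for myParams.getSort() and the fetch cap)
        SearchQueryProperties base = new SearchQueryProperties()
                .setSortSpec(new SortSpec("name"))
                .setMaxResultsRequested(1600);

        // count-only variant: counts need no sort, offset or result cap
        SearchQueryProperties countProps = base.clone()
                .setDoCountOnlyFlag(true)
                .setSortSpec(null)
                .setMaxResultsRequested(null)
                .setOffset(null);

        // page variant: keep the cap, apply the page offset
        SearchQueryProperties pageProps = base.clone()
                .setDoCountOnlyFlag(false)
                .setOffset(0);

        System.out.println(countProps.isDoCountOnlyFlag() + " / " + pageProps.hasOffset());
    }
}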

View File

@ -510,7 +510,6 @@ public class SearchQueryBuilder {
* Generate and return the SQL generated by this builder
*/
public GeneratedSql generate(@Nullable Integer theOffset, @Nullable Integer theMaxResultsToFetch) {
getOrCreateFirstPredicateBuilder();
mySelect.validate();

View File

@ -597,6 +597,7 @@ public class SearchTask implements Callable<Void> {
if (next == -1) {
sb.setMaxResultsToFetch(null);
sb.setShouldDeduplicateInDB(true);
} else {
// we want at least 1 more than our requested amount
// so we know whether there are more results
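The comment above describes the usual fetch-one-extra idiom: by capping the query at one more row than the page size, the task can tell whether a further page exists without running a count. A generic sketch of the idea (not the SearchTask internals):

class FetchOneExtraSketch {
    // pretend the database holds this many matching rows
    private static final int MATCHING_ROWS_IN_DB = 57;

    static int fetchUpTo(int theCap) {
        return Math.min(theCap, MATCHING_ROWS_IN_DB);
    }

    public static void main(String[] args) {
        int requested = 20;
        int fetched = fetchUpTo(requested + 1);    // cap the query at one more than requested
        boolean hasMore = fetched > requested;     // an extra row means another page exists
        System.out.println(fetched + " fetched, more results: " + hasMore); // 21 fetched, more results: true
    }
}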

View File

@ -4072,7 +4072,6 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
ourLog.info(StringUtils.join(names, '\n'));
assertThat(names).containsExactly("Daniel Adams", "Aaron Alexis", "Carol Allen", "Ruth Black", "Brian Brooks", "Amy Clark", "Susan Clark", "Anthony Coleman", "Lisa Coleman", "Steven Coleman", "Ruth Cook", "Betty Davis", "Joshua Diaz", "Brian Gracia", "Sarah Graham", "Stephan Graham");
}
/**

View File

@ -64,44 +64,44 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
new SqlGenerationTestCase(
"single string - no hfj_resource root",
"Patient?name=FOO",
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)))"
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"two regular params - should use hfj_resource as root",
"Patient?name=smith&active=true",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID = ?) AND (t2.HASH_VALUE = ?)))"
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?)) fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID = ?) AND (t2.HASH_VALUE = ?))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"token not as a NOT IN subselect",
"Encounter?class:not=not-there",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = ?) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = ?) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"token not on chain join - NOT IN from hfj_res_link target columns",
"Observation?encounter.class:not=not-there",
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))"
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"bare sort",
"Patient?_sort=name",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
)
, new SqlGenerationTestCase(
"sort with predicate",
"Patient?active=true&_sort=name",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST"
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
)
, new SqlGenerationTestCase(
"chained sort",
"Patient?_sort=Practitioner:general-practitioner.name",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
)
);
}
@ -153,7 +153,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
myPatientDao.search(map);
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only", sql);
}
@ -162,7 +162,6 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
*/
@Test
public void testTwoRegularSearchParams() {
myCaptureQueriesListener.clear();
SearchParameterMap map = SearchParameterMap.newSynchronous()
.add(Patient.SP_NAME, new StringParam("FOO"))
@ -170,14 +169,11 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
myPatientDao.search(map);
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_SYS_AND_VALUE = ?))", sql);
assertEquals("SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_SYS_AND_VALUE = ?)) fetch first ? rows only", sql);
}
@Test
public void testSearchByProfile_VersionedMode() {
// Put a tag in so we can search for it
String code = "http://" + UUID.randomUUID();
Patient p = new Patient();
@ -193,7 +189,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
// Query 1 - Find resources: Make sure we search for tag type+system+code always
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?)))", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?))) fetch first ? rows only", sql);
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql).contains("where rsv1_0.RES_ID in (?)");
@ -202,7 +198,6 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
assertThat(sql).contains("from HFJ_RES_TAG rt1_0 join HFJ_TAG_DEF");
assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id);
}
@Test
@ -229,9 +224,11 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
.add(Constants.PARAM_PROFILE, new UriParam(code));
IBundleProvider outcome = myPatientDao.search(map, mySrd);
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
// Query 1 - Find resources: Just a standard token search in this mode
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?)", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?) fetch first ? rows only", sql);
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql).contains("where rsv1_0.RES_ID in (?)");
@ -255,11 +252,10 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
// Verify
if (theIncludeHashIdentity) {
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE ((t0.HASH_IDENTITY = '7001889285610424179') AND (t0.HASH_SYS_AND_VALUE = '-2780914544385068076'))", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE ((t0.HASH_IDENTITY = '7001889285610424179') AND (t0.HASH_SYS_AND_VALUE = '-2780914544385068076')) fetch first '10000' rows only", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
} else {
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_SYS_AND_VALUE = '-2780914544385068076')", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_SYS_AND_VALUE = '-2780914544385068076') fetch first '10000' rows only", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
}
}
public static class MyPartitionInterceptor {

View File

@ -77,6 +77,15 @@ public interface ISearchBuilder<T extends IResourcePersistentId<?>> {
void setMaxResultsToFetch(Integer theMaxResultsToFetch);
void setShouldDeduplicateInDB(boolean theShouldDeduplicateInDB);
void setRequireTotal(boolean theRequireTotal);
/**
* True if the results should have a 'total' value
*/
boolean requiresTotal();
void loadResourcesByPid(
Collection<T> thePids,
Collection<T> theIncludedPids,