Compare commits

..

No commits in common. "d0419b731a7ac8711b66252565bcf017e75dec36" and "c7a6cd98131916920002a21604e6cd4aa5c5c802" have entirely different histories.

9 changed files with 90 additions and 238 deletions

View File

@ -66,7 +66,6 @@ import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.BeanFactory;
@ -378,7 +377,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
final ISearchBuilder<JpaPid> sb =
mySearchBuilderFactory.newSearchBuilder(theCallingDao, theResourceType, resourceTypeClass);
sb.setFetchSize(mySyncSize);
sb.setRequireTotal(theParams.getCount() != null);
final Integer loadSynchronousUpTo = getLoadSynchronousUpToOrNull(theCacheControlDirective);
boolean isOffsetQuery = theParams.isOffsetQuery();
@ -396,6 +394,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
try {
return direct.get();
} catch (ResourceNotFoundInIndexException theE) {
// some resources were not found in index, so we will inform this and resort to JPA search
ourLog.warn(
@ -403,14 +402,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
}
}
// we set a max to fetch from the db for synchronous searches;
// otherwise, we would have to load everything into memory (or force the db to do so);
// So let's set a max value here
Integer maxToLoad = ObjectUtils.defaultIfNull(
loadSynchronousUpTo, myStorageSettings.getInternalSynchronousSearchSize());
ourLog.debug("Setting a max fetch value of {} for synchronous search", maxToLoad);
sb.setMaxResultsToFetch(maxToLoad);
ourLog.debug("Search {} is loading in synchronous mode", searchUuid);
return mySynchronousSearchSvc.executeQuery(
theParams, theRequestDetails, searchUuid, sb, loadSynchronousUpTo, theRequestPartitionId);

View File

@ -246,7 +246,7 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
resources, theRequestDetails, myInterceptorBroadcaster);
SimpleBundleProvider bundleProvider = new SimpleBundleProvider(resources);
if (hasACount && theSb.requiresTotal()) {
if (hasACount) {
bundleProvider.setTotalResourcesRequestedReturned(receivedResourceCount);
}
if (theParams.isOffsetQuery()) {

View File

@ -60,7 +60,6 @@ import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
@ -202,8 +201,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private SearchParameterMap myParams;
private String mySearchUuid;
private int myFetchSize;
private boolean myRequiresTotal;
private Integer myMaxResultsToFetch;
/**
* Set of PIDs of results that have already been returned in a search.
@ -229,8 +227,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private boolean myHasNextIteratorQuery = false;
private RequestPartitionId myRequestPartitionId;
private SearchQueryProperties mySearchProperties;
@Autowired(required = false)
private IFulltextSearchSvc myFulltextSearchSvc;
@ -276,8 +272,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myResourceSearchViewDao = theResourceSearchViewDao;
myContext = theContext;
myIdHelperService = theIdHelperService;
mySearchProperties = new SearchQueryProperties();
}
@VisibleForTesting
@ -287,21 +281,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
@Override
public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch);
}
public void setShouldDeduplicateInDB(boolean theShouldDeduplicateInDB) {
mySearchProperties.setDeduplicateInDBFlag(theShouldDeduplicateInDB);
}
@Override
public void setRequireTotal(boolean theRequireTotal) {
myRequiresTotal = theRequireTotal;
}
@Override
public boolean requiresTotal() {
return myRequiresTotal;
myMaxResultsToFetch = theMaxResultsToFetch;
}
private void searchForIdsWithAndOr(
@ -310,7 +290,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
@Nonnull SearchParameterMap theParams,
RequestDetails theRequest) {
myParams = theParams;
mySearchProperties.setSortSpec(myParams.getSort());
// Remove any empty parameters
theParams.clean();
@ -381,12 +360,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
return myFulltextSearchSvc.count(myResourceName, theParams.clone());
}
SearchQueryProperties properties = mySearchProperties.clone();
properties.setDoCountOnlyFlag(true);
properties.setSortSpec(null); // counts don't require sorts
properties.setMaxResultsRequested(null);
properties.setOffset(null);
List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null);
List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), null, null, null, true, theRequest, null);
if (queries.isEmpty()) {
return 0L;
} else {
@ -425,24 +399,19 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myCriteriaBuilder = myEntityManager.getCriteriaBuilder();
// we mutate the params. Make a private copy.
myParams = theParams.clone();
mySearchProperties.setSortSpec(myParams.getSort());
mySearchUuid = theSearchUuid;
myRequestPartitionId = theRequestPartitionId;
}
/**
* The query created can be either a count query or the
* actual query.
* This is why it takes a SearchQueryProperties object
* (and doesn't use the local version of it).
* The properties may differ slightly for whichever
* query this is.
*/
private List<ISearchQueryExecutor> createQuery(
SearchParameterMap theParams,
SearchQueryProperties theSearchProperties,
SortSpec sort,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
RequestDetails theRequest,
SearchRuntimeDetails theSearchRuntimeDetails) {
ArrayList<ISearchQueryExecutor> queries = new ArrayList<>();
if (checkUseHibernateSearch()) {
@ -453,7 +422,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
List<JpaPid> fulltextMatchIds = null;
int resultCount = 0;
if (myParams.isLastN()) {
fulltextMatchIds = executeLastNAgainstIndex(theSearchProperties.getMaxResultsRequested());
fulltextMatchIds = executeLastNAgainstIndex(theMaximumResults);
resultCount = fulltextMatchIds.size();
} else if (myParams.getEverythingMode() != null) {
fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
@ -510,9 +479,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (canSkipDatabase) {
ourLog.trace("Query finished after HSearch. Skip db query phase");
if (theSearchProperties.hasMaxResultsRequested()) {
fulltextExecutor = SearchQueryExecutors.limited(
fulltextExecutor, theSearchProperties.getMaxResultsRequested());
if (theMaximumResults != null) {
fulltextExecutor = SearchQueryExecutors.limited(fulltextExecutor, theMaximumResults);
}
queries.add(fulltextExecutor);
} else {
@ -525,11 +493,13 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
SearchBuilder.getMaximumPageSize(),
// for each list of (SearchBuilder.getMaximumPageSize())
// we create a chunked query and add it to 'queries'
t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries));
t -> doCreateChunkedQueries(
theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries));
}
} else {
// do everything in the database.
createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries);
createChunkedQuery(
theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null, queries);
}
return queries;
@ -624,16 +594,16 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private void doCreateChunkedQueries(
SearchParameterMap theParams,
List<Long> thePids,
SearchQueryProperties theSearchQueryProperties,
Integer theOffset,
SortSpec sort,
boolean theCount,
RequestDetails theRequest,
ArrayList<ISearchQueryExecutor> theQueries) {
if (thePids.size() < getMaximumPageSize()) {
thePids = normalizeIdListForInClause(thePids);
}
// TODO - the size was the 4th parameter... what is it supposed to be in createChunkedQuery?
theSearchQueryProperties.setMaxResultsRequested(theParams.size());
createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries);
createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids, theQueries);
}
/**
@ -683,21 +653,27 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private void createChunkedQuery(
SearchParameterMap theParams,
SearchQueryProperties theSearchProperties,
SortSpec sort,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
RequestDetails theRequest,
List<Long> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
if (myParams.getEverythingMode() != null) {
createChunkedQueryForEverythingSearch(theParams, theSearchProperties, thePidList, theSearchQueryExecutors);
createChunkedQueryForEverythingSearch(
theParams, theOffset, theMaximumResults, theCountOnlyFlag, thePidList, theSearchQueryExecutors);
} else {
createChunkedQueryNormalSearch(
theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors);
theParams, sort, theOffset, theCountOnlyFlag, theRequest, thePidList, theSearchQueryExecutors);
}
}
private void createChunkedQueryNormalSearch(
SearchParameterMap theParams,
SearchQueryProperties theSearchProperties,
SortSpec sort,
Integer theOffset,
boolean theCountOnlyFlag,
RequestDetails theRequest,
List<Long> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
@ -709,7 +685,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myResourceName,
mySqlBuilderFactory,
myDialectProvider,
theSearchProperties.isDoCountOnlyFlag());
theCountOnlyFlag);
QueryStack queryStack3 = new QueryStack(
theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
@ -786,7 +762,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* if the MaxResultsToFetch is null, we are requesting "everything",
* so we'll let the db do the deduplication (instead of in-memory)
*/
if (theSearchProperties.isDeduplicateInDBFlag()) {
if (theOffset != null || (myMaxResultsToFetch == null && !theCountOnlyFlag)) {
queryStack3.addGrouping();
queryStack3.setUseAggregate(true);
}
@ -797,34 +773,33 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* If we have a sort, we wrap the criteria search (the search that actually
* finds the appropriate resources) in an outer search which is then sorted
*/
if (theSearchProperties.hasSort()) {
assert !theSearchProperties.isDoCountOnlyFlag();
if (sort != null) {
assert !theCountOnlyFlag;
createSort(queryStack3, theSearchProperties.getSortSpec(), theParams);
createSort(queryStack3, sort, theParams);
}
/*
* Now perform the search
*/
executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder);
executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
}
private void executeSearch(
SearchQueryProperties theProperties,
List<ISearchQueryExecutor> theSearchQueryExecutors,
SearchQueryBuilder sqlBuilder) {
GeneratedSql generatedSql =
sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested());
Integer theOffset, List<ISearchQueryExecutor> theSearchQueryExecutors, SearchQueryBuilder sqlBuilder) {
GeneratedSql generatedSql = sqlBuilder.generate(theOffset, myMaxResultsToFetch);
if (!generatedSql.isMatchNothing()) {
SearchQueryExecutor executor =
mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested());
mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, myMaxResultsToFetch);
theSearchQueryExecutors.add(executor);
}
}
private void createChunkedQueryForEverythingSearch(
SearchParameterMap theParams,
SearchQueryProperties theSearchQueryProperties,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
List<Long> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
@ -836,12 +811,12 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
null,
mySqlBuilderFactory,
myDialectProvider,
theSearchQueryProperties.isDoCountOnlyFlag());
theCountOnlyFlag);
QueryStack queryStack3 = new QueryStack(
theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested());
JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theMaximumResults);
Set<Long> targetPids = new HashSet<>();
if (myParams.get(IAnyResource.SP_RES_ID) != null) {
@ -864,9 +839,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myResourceName,
mySqlBuilderFactory,
myDialectProvider,
theSearchQueryProperties.isDoCountOnlyFlag());
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(
theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested());
theCountOnlyFlag);
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch);
String sql = allTargetsSql.getSql();
Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);
@ -900,7 +874,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* If offset is present, we want to deduplicate the results by using GROUP BY
* ORDER BY is required to make sure we return unique results for each page
*/
if (theSearchQueryProperties.hasOffset()) {
if (theOffset != null) {
queryStack3.addGrouping();
queryStack3.addOrdering();
queryStack3.setUseAggregate(true);
@ -909,7 +883,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
/*
* Now perform the search
*/
executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder);
executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
}
private void addPidListPredicate(List<Long> thePidList, SearchQueryBuilder theSqlBuilder) {
@ -2438,15 +2412,15 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
// If we don't have a query yet, create one
if (myResultsIterator == null) {
if (!mySearchProperties.hasMaxResultsRequested()) {
mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch());
if (myMaxResultsToFetch == null) {
myMaxResultsToFetch = calculateMaxResultsToFetch();
}
/*
* assigns the results iterator
* and populates the myQueryList.
*/
initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
initializeIteratorQuery(myOffset, myMaxResultsToFetch);
}
if (myNext == null) {
@ -2480,7 +2454,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
JpaPid next = JpaPid.fromId(nextLong);
if (!myPidSet.contains(next)) {
if (mySearchProperties.hasMaxResultsRequested()) {
if (myMaxResultsToFetch != null) {
/*
* We only add to the map if we aren't fetching "everything";
* otherwise, we let the de-duplication happen in the database
@ -2500,13 +2474,13 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
if (!myResultsIterator.hasNext()) {
if (mySearchProperties.hasMaxResultsRequested()
&& (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) {
if (myMaxResultsToFetch != null && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) {
if (mySkipCount > 0 && myNonSkipCount == 0) {
sendProcessingMsgAndFirePerformanceHook();
int maxResults = mySearchProperties.getMaxResultsRequested() + 1000;
mySearchProperties.setMaxResultsRequested(maxResults);
initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
myMaxResultsToFetch += 1000;
initializeIteratorQuery(myOffset, myMaxResultsToFetch);
}
}
}
@ -2536,7 +2510,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
}
if (!mySearchProperties.hasMaxResultsRequested()) {
if (myMaxResultsToFetch == null) {
mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount);
} else {
mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size());
@ -2597,7 +2571,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
String msg = "Pass completed with no matching results seeking rows "
+ myPidSet.size() + "-" + mySkipCount
+ ". This indicates an inefficient query! Retrying with new max count of "
+ mySearchProperties.getMaxResultsRequested();
+ myMaxResultsToFetch;
firePerformanceWarning(myRequest, msg);
}
@ -2612,13 +2586,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (myParams.getEverythingMode() != null) {
offset = 0;
}
SearchQueryProperties properties = mySearchProperties.clone();
properties
.setOffset(offset)
.setMaxResultsRequested(theMaxResultsToFetch)
.setDoCountOnlyFlag(false);
myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails);
myQueryList = createQuery(
myParams, mySort, offset, theMaxResultsToFetch, false, myRequest, mySearchRuntimeDetails);
}
mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());

View File

@ -1,104 +0,0 @@
package ca.uhn.fhir.jpa.search.builder.models;
import ca.uhn.fhir.rest.api.SortSpec;
/**
 * Mutable parameter object bundling the per-query options used when building a
 * search SQL query (count-only flag, in-DB deduplication, max results, offset,
 * and sort). Setters are fluent (return {@code this}) so options can be chained.
 *
 * NOTE(review): this class is not thread-safe; it is assumed to be confined to a
 * single search execution — confirm against callers.
 */
public class SearchQueryProperties {
/**
 * True if this query is only to fetch the count (and not any results).
 *
 * True means this is a count only query
 */
private boolean myDoCountOnlyFlag;
/**
 * Whether or not we do deduplication of results in memory
 * (using a hashset, etc), or push this to the database
 * (using GROUP BY, etc).
 *
 * True means use the database
 */
private boolean myDeduplicateInDBFlag;
/**
 * The maximum number of results to fetch (when we want it limited).
 * Can be null if we are fetching everything or paging.
 */
private Integer myMaxResultsRequested;
/**
 * The offset for the results to fetch.
 *
 * null if the first page, some number if it's a later page
 */
private Integer myOffset;
/**
 * The sort spec for this search
 */
private SortSpec mySortSpec;
public boolean isDoCountOnlyFlag() {
return myDoCountOnlyFlag;
}
// Fluent setter; returns this for chaining.
public SearchQueryProperties setDoCountOnlyFlag(boolean theDoCountOnlyFlag) {
myDoCountOnlyFlag = theDoCountOnlyFlag;
return this;
}
public boolean isDeduplicateInDBFlag() {
return myDeduplicateInDBFlag;
}
// Fluent setter; returns this for chaining.
public SearchQueryProperties setDeduplicateInDBFlag(boolean theDeduplicateInDBFlag) {
myDeduplicateInDBFlag = theDeduplicateInDBFlag;
return this;
}
public Integer getMaxResultsRequested() {
return myMaxResultsRequested;
}
// Fluent setter; null means "no limit" (fetch everything / paging).
public SearchQueryProperties setMaxResultsRequested(Integer theMaxResultsRequested) {
myMaxResultsRequested = theMaxResultsRequested;
return this;
}
// Convenience null-check for the max-results limit.
public boolean hasMaxResultsRequested() {
return myMaxResultsRequested != null;
}
public Integer getOffset() {
return myOffset;
}
// Convenience null-check; null offset means "first page".
public boolean hasOffset() {
return myOffset != null;
}
// Fluent setter; returns this for chaining.
public SearchQueryProperties setOffset(Integer theOffset) {
myOffset = theOffset;
return this;
}
public SortSpec getSortSpec() {
return mySortSpec;
}
// Convenience null-check for the sort spec.
public boolean hasSort() {
return mySortSpec != null;
}
// Fluent setter; returns this for chaining.
public SearchQueryProperties setSortSpec(SortSpec theSortSpec) {
mySortSpec = theSortSpec;
return this;
}
/**
 * Returns a field-by-field copy of this object. The SortSpec reference is
 * shared (shallow copy), not cloned.
 *
 * NOTE(review): this is a manual copy method — it does not call super.clone()
 * and the class does not implement Cloneable, so it does not follow the
 * standard Object.clone() contract.
 */
public SearchQueryProperties clone() {
return new SearchQueryProperties()
.setMaxResultsRequested(myMaxResultsRequested)
.setSortSpec(mySortSpec)
.setOffset(myOffset)
.setDoCountOnlyFlag(myDoCountOnlyFlag)
.setDeduplicateInDBFlag(myDeduplicateInDBFlag);
}
}

View File

@ -510,6 +510,7 @@ public class SearchQueryBuilder {
* Generate and return the SQL generated by this builder
*/
public GeneratedSql generate(@Nullable Integer theOffset, @Nullable Integer theMaxResultsToFetch) {
getOrCreateFirstPredicateBuilder();
mySelect.validate();

View File

@ -597,7 +597,6 @@ public class SearchTask implements Callable<Void> {
if (next == -1) {
sb.setMaxResultsToFetch(null);
sb.setShouldDeduplicateInDB(true);
} else {
// we want at least 1 more than our requested amount
// so we know that there are other results

View File

@ -4072,6 +4072,7 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
ourLog.info(StringUtils.join(names, '\n'));
assertThat(names).containsExactly("Daniel Adams", "Aaron Alexis", "Carol Allen", "Ruth Black", "Brian Brooks", "Amy Clark", "Susan Clark", "Anthony Coleman", "Lisa Coleman", "Steven Coleman", "Ruth Cook", "Betty Davis", "Joshua Diaz", "Brian Gracia", "Sarah Graham", "Stephan Graham");
}
/**

View File

@ -64,44 +64,44 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
new SqlGenerationTestCase(
"single string - no hfj_resource root",
"Patient?name=FOO",
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) fetch first ? rows only"
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)))"
)
, new SqlGenerationTestCase(
"two regular params - should use hfj_resource as root",
"Patient?name=smith&active=true",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?)) fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID = ?) AND (t2.HASH_VALUE = ?))) fetch first ? rows only"
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID = ?) AND (t2.HASH_VALUE = ?)))"
)
, new SqlGenerationTestCase(
"token not as a NOT IN subselect",
"Encounter?class:not=not-there",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = ?) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))) fetch first ? rows only"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = ?) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))"
)
, new SqlGenerationTestCase(
"token not on chain join - NOT IN from hfj_res_link target columns",
"Observation?encounter.class:not=not-there",
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))) fetch first ? rows only"
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))"
)
, new SqlGenerationTestCase(
"bare sort",
"Patient?_sort=name",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST"
)
, new SqlGenerationTestCase(
"sort with predicate",
"Patient?active=true&_sort=name",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST"
)
, new SqlGenerationTestCase(
"chained sort",
"Patient?_sort=Practitioner:general-practitioner.name",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST"
)
);
}
@ -153,7 +153,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
myPatientDao.search(map);
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", sql);
}
@ -162,6 +162,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
*/
@Test
public void testTwoRegularSearchParams() {
myCaptureQueriesListener.clear();
SearchParameterMap map = SearchParameterMap.newSynchronous()
.add(Patient.SP_NAME, new StringParam("FOO"))
@ -169,11 +170,14 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
myPatientDao.search(map);
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_SYS_AND_VALUE = ?)) fetch first ? rows only", sql);
assertEquals("SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_SYS_AND_VALUE = ?))", sql);
}
@Test
public void testSearchByProfile_VersionedMode() {
// Put a tag in so we can search for it
String code = "http://" + UUID.randomUUID();
Patient p = new Patient();
@ -189,7 +193,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
// Query 1 - Find resources: Make sure we search for tag type+system+code always
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?))) fetch first ? rows only", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?)))", sql);
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql).contains("where rsv1_0.RES_ID in (?)");
@ -198,6 +202,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
assertThat(sql).contains("from HFJ_RES_TAG rt1_0 join HFJ_TAG_DEF");
assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id);
}
@Test
@ -224,11 +229,9 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
.add(Constants.PARAM_PROFILE, new UriParam(code));
IBundleProvider outcome = myPatientDao.search(map, mySrd);
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
// Query 1 - Find resources: Just a standard token search in this mode
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?) fetch first ? rows only", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?)", sql);
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql).contains("where rsv1_0.RES_ID in (?)");
@ -252,10 +255,11 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
// Verify
if (theIncludeHashIdentity) {
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE ((t0.HASH_IDENTITY = '7001889285610424179') AND (t0.HASH_SYS_AND_VALUE = '-2780914544385068076')) fetch first '10000' rows only", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE ((t0.HASH_IDENTITY = '7001889285610424179') AND (t0.HASH_SYS_AND_VALUE = '-2780914544385068076'))", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
} else {
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_SYS_AND_VALUE = '-2780914544385068076') fetch first '10000' rows only", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_SYS_AND_VALUE = '-2780914544385068076')", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
}
}
public static class MyPartitionInterceptor {

View File

@ -77,15 +77,6 @@ public interface ISearchBuilder<T extends IResourcePersistentId<?>> {
void setMaxResultsToFetch(Integer theMaxResultsToFetch);
void setShouldDeduplicateInDB(boolean theShouldDeduplicateInDB);
void setRequireTotal(boolean theRequireTotal);
/**
* True if the results should have a 'total' value
*/
boolean requiresTotal();
void loadResourcesByPid(
Collection<T> thePids,
Collection<T> theIncludedPids,