This commit is contained in:
leif stawnyczy 2024-11-27 09:11:00 -05:00
parent c7a6cd9813
commit 99868e1528
4 changed files with 160 additions and 67 deletions

View File

@ -66,6 +66,7 @@ import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.BeanFactory;
@ -394,7 +395,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
try {
return direct.get();
} catch (ResourceNotFoundInIndexException theE) {
// some resources were not found in index, so we will inform this and resort to JPA search
ourLog.warn(
@ -402,6 +402,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
}
}
// we set a max to fetch from the db for synchronous searches;
// otherwise, we would have to load everything into memory (or force the db to do so);
// So let's set a max value here
// Integer maxToLoad = ObjectUtils.defaultIfNull(loadSynchronousUpTo, myStorageSettings.getInternalSynchronousSearchSize());
// ourLog.debug("Setting a max fetch value of {} for synchronous search", maxToLoad);
// sb.setMaxResultsToFetch(maxToLoad);
ourLog.debug("Search {} is loading in synchronous mode", searchUuid);
return mySynchronousSearchSvc.executeQuery(
theParams, theRequestDetails, searchUuid, sb, loadSynchronousUpTo, theRequestPartitionId);

View File

@ -60,6 +60,7 @@ import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
@ -360,7 +361,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
return myFulltextSearchSvc.count(myResourceName, theParams.clone());
}
List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), null, null, null, true, theRequest, null);
SearchQueryProperties properties = new SearchQueryProperties();
List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null);
if (queries.isEmpty()) {
return 0L;
} else {
@ -405,10 +408,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private List<ISearchQueryExecutor> createQuery(
SearchParameterMap theParams,
SortSpec sort,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchProperties,
RequestDetails theRequest,
SearchRuntimeDetails theSearchRuntimeDetails) {
@ -422,7 +422,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
List<JpaPid> fulltextMatchIds = null;
int resultCount = 0;
if (myParams.isLastN()) {
fulltextMatchIds = executeLastNAgainstIndex(theMaximumResults);
fulltextMatchIds = executeLastNAgainstIndex(theSearchProperties.getMaxResultsRequested());
resultCount = fulltextMatchIds.size();
} else if (myParams.getEverythingMode() != null) {
fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
@ -479,8 +479,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (canSkipDatabase) {
ourLog.trace("Query finished after HSearch. Skip db query phase");
if (theMaximumResults != null) {
fulltextExecutor = SearchQueryExecutors.limited(fulltextExecutor, theMaximumResults);
if (theSearchProperties.getMaxResultsRequested() != null) {
fulltextExecutor = SearchQueryExecutors.limited(fulltextExecutor, theSearchProperties.getMaxResultsRequested());
}
queries.add(fulltextExecutor);
} else {
@ -494,12 +494,12 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
// for each list of (SearchBuilder.getMaximumPageSize())
// we create a chunked query and add it to 'queries'
t -> doCreateChunkedQueries(
theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries));
theParams, t, theSearchProperties, theRequest, queries));
}
} else {
// do everything in the database.
createChunkedQuery(
theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null, queries);
theParams, theSearchProperties, theRequest, null, queries);
}
return queries;
@ -594,16 +594,16 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private void doCreateChunkedQueries(
SearchParameterMap theParams,
List<Long> thePids,
Integer theOffset,
SortSpec sort,
boolean theCount,
SearchQueryProperties theSearchQueryProperties,
RequestDetails theRequest,
ArrayList<ISearchQueryExecutor> theQueries) {
if (thePids.size() < getMaximumPageSize()) {
thePids = normalizeIdListForInClause(thePids);
}
createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids, theQueries);
// TODO(review): the removed createChunkedQuery call passed thePids.size() as its 4th (maximum-results) argument; verify that theParams.size() below is really the intended replacement and not a behavior change
theSearchQueryProperties.setMaxResultsRequested(theParams.size());
createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries);
}
/**
@ -653,27 +653,22 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private void createChunkedQuery(
SearchParameterMap theParams,
SortSpec sort,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchProperties,
RequestDetails theRequest,
List<Long> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
if (myParams.getEverythingMode() != null) {
createChunkedQueryForEverythingSearch(
theParams, theOffset, theMaximumResults, theCountOnlyFlag, thePidList, theSearchQueryExecutors);
theParams, theSearchProperties, thePidList, theSearchQueryExecutors);
} else {
createChunkedQueryNormalSearch(
theParams, sort, theOffset, theCountOnlyFlag, theRequest, thePidList, theSearchQueryExecutors);
theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors);
}
}
private void createChunkedQueryNormalSearch(
SearchParameterMap theParams,
SortSpec sort,
Integer theOffset,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchProperites,
RequestDetails theRequest,
List<Long> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
@ -685,7 +680,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myResourceName,
mySqlBuilderFactory,
myDialectProvider,
theCountOnlyFlag);
theSearchProperites.isDoCountOnlyFlag()
);
QueryStack queryStack3 = new QueryStack(
theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
@ -762,7 +758,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* if the MaxResultsToFetch is null, we are requesting "everything",
* so we'll let the db do the deduplication (instead of in-memory)
*/
if (theOffset != null || (myMaxResultsToFetch == null && !theCountOnlyFlag)) {
if (theSearchProperites.isDeduplicateInDBFlag()) {
queryStack3.addGrouping();
queryStack3.setUseAggregate(true);
}
@ -773,16 +769,16 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* If we have a sort, we wrap the criteria search (the search that actually
* finds the appropriate resources) in an outer search which is then sorted
*/
if (sort != null) {
assert !theCountOnlyFlag;
if (theSearchProperites.hasSort()) {
assert !theSearchProperites.isDoCountOnlyFlag();
createSort(queryStack3, sort, theParams);
createSort(queryStack3, theSearchProperites.getSortSpec(), theParams);
}
/*
* Now perform the search
*/
executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
executeSearch(theSearchProperites.getOffset(), theSearchQueryExecutors, sqlBuilder);
}
private void executeSearch(
@ -797,9 +793,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private void createChunkedQueryForEverythingSearch(
SearchParameterMap theParams,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
SearchQueryProperties theSearchQueryProperties,
List<Long> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
@ -811,12 +805,12 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
null,
mySqlBuilderFactory,
myDialectProvider,
theCountOnlyFlag);
theSearchQueryProperties.isDoCountOnlyFlag());
QueryStack queryStack3 = new QueryStack(
theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theMaximumResults);
JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested());
Set<Long> targetPids = new HashSet<>();
if (myParams.get(IAnyResource.SP_RES_ID) != null) {
@ -839,8 +833,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myResourceName,
mySqlBuilderFactory,
myDialectProvider,
theCountOnlyFlag);
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch);
theSearchQueryProperties.isDoCountOnlyFlag());
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theSearchQueryProperties.getOffset(), myMaxResultsToFetch);
String sql = allTargetsSql.getSql();
Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);
@ -874,7 +868,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* If offset is present, we want deduplicate the results by using GROUP BY
* ORDER BY is required to make sure we return unique results for each page
*/
if (theOffset != null) {
if (theSearchQueryProperties.hasOffset()) {
queryStack3.addGrouping();
queryStack3.addOrdering();
queryStack3.setUseAggregate(true);
@ -883,7 +877,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
/*
* Now perform the search
*/
executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder);
executeSearch(theSearchQueryProperties.getOffset(), theSearchQueryExecutors, sqlBuilder);
}
private void addPidListPredicate(List<Long> thePidList, SearchQueryBuilder theSqlBuilder) {
@ -2586,8 +2580,13 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (myParams.getEverythingMode() != null) {
offset = 0;
}
myQueryList = createQuery(
myParams, mySort, offset, theMaxResultsToFetch, false, myRequest, mySearchRuntimeDetails);
SearchQueryProperties properties = new SearchQueryProperties();
properties.setOffset(offset)
.setMaxResultsRequested(theMaxResultsToFetch)
.setDoCountOnlyFlag(false)
.setSortSpec(mySort);
myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails);
}
mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());

View File

@ -0,0 +1,91 @@
package ca.uhn.fhir.jpa.search.builder.models;
import ca.uhn.fhir.rest.api.SortSpec;
/**
 * Value object bundling the knobs that shape a single generated search query:
 * count-only mode, in-DB deduplication, result-count cap, paging offset and sort.
 * All setters are fluent (return {@code this}) so callers can chain configuration.
 */
public class SearchQueryProperties {

	/**
	 * When true, the generated query only computes a count and
	 * fetches no actual result rows.
	 */
	private boolean myDoCountOnlyFlag;

	/**
	 * Controls where result deduplication happens: true pushes it
	 * into the database (GROUP BY etc.), false keeps it in memory
	 * (hash sets etc.).
	 */
	private boolean myDeduplicateInDBFlag;

	/**
	 * Cap on the number of results to fetch, or null when fetching
	 * everything / relying on paging instead.
	 */
	private Integer myMaxResultsRequested;

	/**
	 * Paging offset; null means the first page.
	 */
	private Integer myOffset;

	/**
	 * Sort specification for this search, or null for unsorted.
	 */
	private SortSpec mySortSpec;

	public boolean isDoCountOnlyFlag() {
		return myDoCountOnlyFlag;
	}

	public SearchQueryProperties setDoCountOnlyFlag(boolean theDoCountOnlyFlag) {
		this.myDoCountOnlyFlag = theDoCountOnlyFlag;
		return this;
	}

	public boolean isDeduplicateInDBFlag() {
		return myDeduplicateInDBFlag;
	}

	public SearchQueryProperties setDeduplicateInDBFlag(boolean theDeduplicateInDBFlag) {
		this.myDeduplicateInDBFlag = theDeduplicateInDBFlag;
		return this;
	}

	public Integer getMaxResultsRequested() {
		return myMaxResultsRequested;
	}

	public SearchQueryProperties setMaxResultsRequested(Integer theMaxResultsRequested) {
		this.myMaxResultsRequested = theMaxResultsRequested;
		return this;
	}

	public Integer getOffset() {
		return myOffset;
	}

	/** @return true when a paging offset is present (i.e. not the first page) */
	public boolean hasOffset() {
		return myOffset != null;
	}

	public SearchQueryProperties setOffset(Integer theOffset) {
		this.myOffset = theOffset;
		return this;
	}

	public SortSpec getSortSpec() {
		return mySortSpec;
	}

	/** @return true when a sort specification has been provided */
	public boolean hasSort() {
		return mySortSpec != null;
	}

	public SearchQueryProperties setSortSpec(SortSpec theSortSpec) {
		this.mySortSpec = theSortSpec;
		return this;
	}
}

View File

@ -64,44 +64,44 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
new SqlGenerationTestCase(
"single string - no hfj_resource root",
"Patient?name=FOO",
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)))"
"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"two regular params - should use hfj_resource as root",
"Patient?name=smith&active=true",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?))",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID = ?) AND (t2.HASH_VALUE = ?)))"
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_VALUE = ?)) fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) INNER JOIN HFJ_SPIDX_TOKEN t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID)) WHERE (((t0.PARTITION_ID = ?) AND ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))) AND ((t2.PARTITION_ID = ?) AND (t2.HASH_VALUE = ?))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"token not as a NOT IN subselect",
"Encounter?class:not=not-there",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = ?) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.RES_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t0.PARTITION_ID = ?) AND ((t0.PARTITION_ID,t0.RES_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"token not on chain join - NOT IN from hfj_res_link target columns",
"Observation?encounter.class:not=not-there",
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )))"
"SELECT t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) )) fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.SRC_RESOURCE_ID FROM HFJ_RES_LINK t0 WHERE ((t0.SRC_PATH = ?) AND ((t0.PARTITION_ID = ?) AND ((t0.TARGET_RES_PARTITION_ID,t0.TARGET_RESOURCE_ID) NOT IN (SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_VALUE = ?)) ))) fetch first ? rows only"
)
, new SqlGenerationTestCase(
"bare sort",
"Patient?_sort=name",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_SPIDX_STRING t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.RES_ID) AND (t1.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t1.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
)
, new SqlGenerationTestCase(
"sort with predicate",
"Patient?active=true&_sort=name",
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST"
"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (t0.HASH_VALUE = ?) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t1.PARTITION_ID,t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_TOKEN t0 ON ((t1.PARTITION_ID = t0.PARTITION_ID) AND (t1.RES_ID = t0.RES_ID)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.PARTITION_ID = t2.PARTITION_ID) AND (t1.RES_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.PARTITION_ID = ?) AND (t0.HASH_VALUE = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
)
, new SqlGenerationTestCase(
"chained sort",
"Patient?_sort=Practitioner:general-practitioner.name",
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST"
"SELECT t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE ((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only",
"SELECT t0.PARTITION_ID,t0.RES_ID FROM HFJ_RESOURCE t0 LEFT OUTER JOIN HFJ_RES_LINK t1 ON ((t0.PARTITION_ID = t1.PARTITION_ID) AND (t0.RES_ID = t1.SRC_RESOURCE_ID) AND (t1.SRC_PATH = ?)) LEFT OUTER JOIN HFJ_SPIDX_STRING t2 ON ((t1.TARGET_RES_PARTITION_ID = t2.PARTITION_ID) AND (t1.TARGET_RESOURCE_ID = t2.RES_ID) AND (t2.HASH_IDENTITY = ?)) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND (t0.PARTITION_ID = ?)) ORDER BY t2.SP_VALUE_NORMALIZED ASC NULLS LAST fetch first ? rows only"
)
);
}
@ -153,7 +153,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
myPatientDao.search(map);
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) fetch first ? rows only", sql);
}
@ -162,7 +162,6 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
*/
@Test
public void testTwoRegularSearchParams() {
myCaptureQueriesListener.clear();
SearchParameterMap map = SearchParameterMap.newSynchronous()
.add(Patient.SP_NAME, new StringParam("FOO"))
@ -170,14 +169,11 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
myPatientDao.search(map);
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_SYS_AND_VALUE = ?))", sql);
assertEquals("SELECT t1.RES_ID FROM HFJ_RESOURCE t1 INNER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) INNER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_SYS_AND_VALUE = ?)) fetch first ? rows only", sql);
}
@Test
public void testSearchByProfile_VersionedMode() {
// Put a tag in so we can search for it
String code = "http://" + UUID.randomUUID();
Patient p = new Patient();
@ -193,7 +189,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
// Query 1 - Find resources: Make sure we search for tag type+system+code always
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?)))", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?))) fetch first ? rows only", sql);
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql).contains("where rsv1_0.RES_ID in (?)");
@ -202,7 +198,6 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
assertThat(sql).contains("from HFJ_RES_TAG rt1_0 join HFJ_TAG_DEF");
assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id);
}
@Test
@ -229,9 +224,11 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
.add(Constants.PARAM_PROFILE, new UriParam(code));
IBundleProvider outcome = myPatientDao.search(map, mySrd);
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
// Query 1 - Find resources: Just a standard token search in this mode
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?)", sql);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?) fetch first ? rows only", sql);
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql).contains("where rsv1_0.RES_ID in (?)");
@ -255,11 +252,10 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
// Verify
if (theIncludeHashIdentity) {
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE ((t0.HASH_IDENTITY = '7001889285610424179') AND (t0.HASH_SYS_AND_VALUE = '-2780914544385068076'))", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE ((t0.HASH_IDENTITY = '7001889285610424179') AND (t0.HASH_SYS_AND_VALUE = '-2780914544385068076')) fetch first '10000' rows only", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
} else {
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_SYS_AND_VALUE = '-2780914544385068076')", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_TOKEN t0 WHERE (t0.HASH_SYS_AND_VALUE = '-2780914544385068076') fetch first '10000' rows only", myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false));
}
}
public static class MyPartitionInterceptor {