5198 fixing searching everything op (#5201)

* fixing searches

* fixing paging in master

* cleanup

* Fixing the tests

* fixing tests

* fixing test

* cleanup

* fixing

* removing unneeded class

* adding a changelog and cleaning some final details

* code styles

* cleaning up imports

* review notes

* formatting

---------

Co-authored-by: leif stawnyczy <leifstawnyczy@leifs-MacBook-Pro.local>
TipzCM 2023-08-17 11:42:06 -04:00 committed by GitHub
parent a10856e091
commit fdfda37f73
9 changed files with 471 additions and 201 deletions

@ -0,0 +1,7 @@
---
type: fix
issue: 5198
title: "Resolved an issue with type-everything search operations (eg, /Patient/$everything),
where not all page results were being returned if _count was specified to be
the same value as the maximum page size to fetch.
"

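A central part of the fix (visible further down in SearchTask, where the prefetch loop now calls sb.setMaxResultsToFetch(myMaxResultsToFetch + 1)) is the familiar "fetch one extra row" pattern: load one result beyond the page size so that a page holding exactly _count results still reveals whether another page exists. A minimal sketch of that pattern in plain Java, using hypothetical helper names that do not appear in the codebase:

import java.util.List;

class PagePeek {
    /**
     * Assumes theFetched was loaded with a query limit of thePageSize + 1.
     * If more than thePageSize rows came back, a further page exists.
     */
    static <T> boolean hasNextPage(List<T> theFetched, int thePageSize) {
        return theFetched.size() > thePageSize;
    }

    /** Trims the sentinel extra row before the page is handed back to the client. */
    static <T> List<T> pageContents(List<T> theFetched, int thePageSize) {
        return theFetched.size() > thePageSize ? theFetched.subList(0, thePageSize) : theFetched;
    }
}

With that in place, a $everything request whose _count equals the maximum page size (the scenario exercised by the new PatientEverythingPaginationR4Test below) gets a next link instead of silently dropping the remaining results.
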
@ -55,6 +55,7 @@ import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
@ -116,7 +117,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
@ -178,7 +178,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
private List<JpaPid> myAlsoIncludePids;
private CriteriaBuilder myCriteriaBuilder;
private SearchParameterMap myParams;
private String mySearchUuid;
@ -449,9 +448,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
} else {
// do everything in the database.
Optional<SearchQueryExecutor> query = createChunkedQuery(
theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null);
query.ifPresent(queries::add);
createChunkedQuery(
theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null, queries);
}
return queries;
@ -541,9 +539,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (thePids.size() < getMaximumPageSize()) {
normalizeIdListForLastNInClause(thePids);
}
Optional<SearchQueryExecutor> query =
createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids);
query.ifPresent(t -> theQueries.add(t));
createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids, theQueries);
}
/**
@ -551,7 +547,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
*
* @param theTargetPids
*/
private void extractTargetPidsFromIdParams(HashSet<Long> theTargetPids) {
private void extractTargetPidsFromIdParams(
HashSet<Long> theTargetPids, List<ISearchQueryExecutor> theSearchQueryExecutors) {
// get all the IQueryParameterType objects
// for _id -> these should all be StringParam values
HashSet<String> ids = new HashSet<>();
@ -575,25 +572,26 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
// this will throw if an id is not found
Map<String, JpaPid> idToPid = myIdHelperService.resolveResourcePersistentIds(
myRequestPartitionId, myResourceName, new ArrayList<>(ids));
if (myAlsoIncludePids == null) {
myAlsoIncludePids = new ArrayList<>();
}
// add the pids to targetPids
for (JpaPid pid : idToPid.values()) {
myAlsoIncludePids.add(pid);
theTargetPids.add(pid.getId());
}
// add the target pids to our executors as the first
// results iterator to go through
theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(theTargetPids)));
}
private Optional<SearchQueryExecutor> createChunkedQuery(
private void createChunkedQuery(
SearchParameterMap theParams,
SortSpec sort,
Integer theOffset,
Integer theMaximumResults,
boolean theCountOnlyFlag,
RequestDetails theRequest,
List<Long> thePidList) {
List<Long> thePidList,
List<ISearchQueryExecutor> theSearchQueryExecutors) {
String sqlBuilderResourceName = myParams.getEverythingMode() == null ? myResourceName : null;
SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
myContext,
@ -627,7 +625,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (myParams.getEverythingMode() != null) {
HashSet<Long> targetPids = new HashSet<>();
if (myParams.get(IAnyResource.SP_RES_ID) != null) {
extractTargetPidsFromIdParams(targetPids);
// will add an initial search executor for
// _id params
extractTargetPidsFromIdParams(targetPids, theSearchQueryExecutors);
} else {
// For Everything queries, we make the query root by the ResourceLink table, since this query
// is basically a reverse-include search. For type/Everything (as opposed to instance/Everything)
@ -645,11 +645,11 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch);
String sql = allTargetsSql.getSql();
Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);
List<Long> output = jdbcTemplate.query(sql, args, new SingleColumnRowMapper<>(Long.class));
if (myAlsoIncludePids == null) {
myAlsoIncludePids = new ArrayList<>(output.size());
}
myAlsoIncludePids.addAll(JpaPid.fromLongList(output));
// we add a search executor to fetch unlinked patients first
theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output));
}
List<String> typeSourceResources = new ArrayList<>();
@ -747,12 +747,11 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* Now perform the search
*/
GeneratedSql generatedSql = sqlBuilder.generate(theOffset, myMaxResultsToFetch);
if (generatedSql.isMatchNothing()) {
return Optional.empty();
if (!generatedSql.isMatchNothing()) {
SearchQueryExecutor executor =
mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, myMaxResultsToFetch);
theSearchQueryExecutors.add(executor);
}
SearchQueryExecutor executor = mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, myMaxResultsToFetch);
return Optional.of(executor);
}
private Collection<String> extractTypeSourceResourcesFromParams() {
@ -1925,11 +1924,35 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private final Integer myOffset;
private boolean myFirst = true;
private IncludesIterator myIncludesIterator;
/**
* The JpaPid of the next result in this query.
* Will not be null if fetched using getNext()
*/
private JpaPid myNext;
/**
* The current query result iterator, running SQL and supplying PIDs
* @see #myQueryList
*/
private ISearchQueryExecutor myResultsIterator;
private boolean myFetchIncludesForEverythingOperation;
/**
* The count of resources skipped because they were seen in earlier results
*/
private int mySkipCount = 0;
/**
* The count of resources that are new in this search
* (i.e., not cached in previous searches)
*/
private int myNonSkipCount = 0;
/**
* The list of queries to use to find all results.
* Plain JPA queries will normally have a single entry.
* Queries that involve Hibernate Search/Elasticsearch may have
* multiple queries because of chunking.
* The $everything operation also adds extra result executors of its own.
*/
private List<ISearchQueryExecutor> myQueryList = new ArrayList<>();
private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) {
@ -1967,109 +1990,87 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
}
// assigns the results iterator
/*
* assigns the results iterator
* and populates the myQueryList.
*/
initializeIteratorQuery(myOffset, myMaxResultsToFetch);
}
if (myAlsoIncludePids == null) {
myAlsoIncludePids = new ArrayList<>();
if (myNext == null) {
// no next means we need a new query (if one is available)
while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) {
// Update iterator with next chunk if necessary.
if (!myResultsIterator.hasNext()) {
retrieveNextIteratorQuery();
// if our new results iterator is also empty
// we're done here
if (!myResultsIterator.hasNext()) {
break;
}
}
Long nextLong = myResultsIterator.next();
if (myHavePerfTraceFoundIdHook) {
HookParams params = new HookParams()
.add(Integer.class, System.identityHashCode(this))
.add(Object.class, nextLong);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster,
myRequest,
Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID,
params);
}
if (nextLong != null) {
JpaPid next = JpaPid.fromId(nextLong);
if (myPidSet.add(next)) {
myNext = next;
myNonSkipCount++;
break;
} else {
mySkipCount++;
}
}
if (!myResultsIterator.hasNext()) {
if (myMaxResultsToFetch != null && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) {
if (mySkipCount > 0 && myNonSkipCount == 0) {
sendProcessingMsgAndFirePerformanceHook();
myMaxResultsToFetch += 1000;
initializeIteratorQuery(myOffset, myMaxResultsToFetch);
}
}
}
}
}
if (myNext == null) {
for (Iterator<JpaPid> myPreResultsIterator = myAlsoIncludePids.iterator();
myPreResultsIterator.hasNext(); ) {
JpaPid next = myPreResultsIterator.next();
if (next != null)
if (myPidSet.add(next)) {
myNext = next;
break;
}
// if we got here, it means the current JpaPid has already been processed
// and we will decide (here) if we need to fetch related resources recursively
if (myFetchIncludesForEverythingOperation) {
myIncludesIterator = new IncludesIterator(myPidSet, myRequest);
myFetchIncludesForEverythingOperation = false;
}
if (myNext == null) {
while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) {
// Update iterator with next chunk if necessary.
if (!myResultsIterator.hasNext()) {
retrieveNextIteratorQuery();
}
Long nextLong = myResultsIterator.next();
if (myHavePerfTraceFoundIdHook) {
HookParams params = new HookParams()
.add(Integer.class, System.identityHashCode(this))
.add(Object.class, nextLong);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster,
myRequest,
Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID,
params);
}
if (nextLong != null) {
JpaPid next = JpaPid.fromId(nextLong);
if (myIncludesIterator != null) {
while (myIncludesIterator.hasNext()) {
JpaPid next = myIncludesIterator.next();
if (next != null)
if (myPidSet.add(next)) {
myNext = next;
myNonSkipCount++;
break;
} else {
mySkipCount++;
}
}
if (!myResultsIterator.hasNext()) {
if (myMaxResultsToFetch != null
&& (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) {
if (mySkipCount > 0 && myNonSkipCount == 0) {
StorageProcessingMessage message = new StorageProcessingMessage();
String msg = "Pass completed with no matching results seeking rows "
+ myPidSet.size() + "-" + mySkipCount
+ ". This indicates an inefficient query! Retrying with new max count of "
+ myMaxResultsToFetch;
ourLog.warn(msg);
message.setMessage(msg);
HookParams params = new HookParams()
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest)
.add(StorageProcessingMessage.class, message);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster,
myRequest,
Pointcut.JPA_PERFTRACE_WARNING,
params);
myMaxResultsToFetch += 1000;
initializeIteratorQuery(myOffset, myMaxResultsToFetch);
}
}
}
}
}
if (myNext == null) {
// if we got here, it means the current JpaPid has already been processed
// and we will decide (here) if we need to fetch related resources recursively
if (myFetchIncludesForEverythingOperation) {
myIncludesIterator = new IncludesIterator(myPidSet, myRequest);
myFetchIncludesForEverythingOperation = false;
}
if (myIncludesIterator != null) {
while (myIncludesIterator.hasNext()) {
JpaPid next = myIncludesIterator.next();
if (next != null)
if (myPidSet.add(next)) {
myNext = next;
break;
}
}
if (myNext == null) {
myNext = NO_MORE;
}
} else {
if (myNext == null) {
myNext = NO_MORE;
}
} else {
myNext = NO_MORE;
}
} // if we need to fetch the next result
}
mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size());
@ -2100,6 +2101,22 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
}
private void sendProcessingMsgAndFirePerformanceHook() {
StorageProcessingMessage message = new StorageProcessingMessage();
String msg = "Pass completed with no matching results seeking rows "
+ myPidSet.size() + "-" + mySkipCount
+ ". This indicates an inefficient query! Retrying with new max count of "
+ myMaxResultsToFetch;
ourLog.warn(msg);
message.setMessage(msg);
HookParams params = new HookParams()
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest)
.add(StorageProcessingMessage.class, message);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_WARNING, params);
}
private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) {
if (myQueryList.isEmpty()) {
// Capture times for Lucene/Elasticsearch queries as well

@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.search.builder;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import org.apache.commons.lang3.Validate;
import java.util.Iterator;
@ -57,41 +58,6 @@ public class SearchQueryExecutors {
return new ResolvedSearchQueryExecutor(rawPids);
}
/**
* Adapt bare Iterator to our internal query interface.
*/
static class ResolvedSearchQueryExecutor implements ISearchQueryExecutor {
private final Iterator<Long> myIterator;
ResolvedSearchQueryExecutor(Iterable<Long> theIterable) {
this(theIterable.iterator());
}
ResolvedSearchQueryExecutor(Iterator<Long> theIterator) {
myIterator = theIterator;
}
@Nonnull
public static ResolvedSearchQueryExecutor from(List<Long> rawPids) {
return new ResolvedSearchQueryExecutor(rawPids);
}
@Override
public boolean hasNext() {
return myIterator.hasNext();
}
@Override
public Long next() {
return myIterator.next();
}
@Override
public void close() {
// empty
}
}
public static ISearchQueryExecutor from(Iterator<JpaPid> theIterator) {
return new JpaPidQueryAdaptor(theIterator);
}

@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.search.builder.models;
import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor;
import java.util.Iterator;
import java.util.List;
import javax.annotation.Nonnull;
public class ResolvedSearchQueryExecutor implements ISearchQueryExecutor {
private final Iterator<Long> myIterator;
public ResolvedSearchQueryExecutor(Iterable<Long> theIterable) {
this(theIterable.iterator());
}
public ResolvedSearchQueryExecutor(Iterator<Long> theIterator) {
myIterator = theIterator;
}
@Nonnull
public static ResolvedSearchQueryExecutor from(List<Long> rawPids) {
return new ResolvedSearchQueryExecutor(rawPids);
}
@Override
public boolean hasNext() {
return myIterator.hasNext();
}
@Override
public Long next() {
return myIterator.next();
}
@Override
public void close() {
// empty
}
}

@ -122,9 +122,11 @@ public class SearchTask implements Callable<Void> {
private boolean myAdditionalPrefetchThresholdsRemaining;
private List<JpaPid> myPreviouslyAddedResourcePids;
private Integer myMaxResultsToFetch;
/**
* Constructor
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public SearchTask(
SearchTaskParameters theCreationParams,
HapiTransactionService theManagedTxManager,
@ -198,6 +200,7 @@ public class SearchTask implements Callable<Void> {
myCountSavedTotal = myPreviouslyAddedResourcePids.size();
}
@SuppressWarnings("rawtypes")
private ISearchBuilder newSearchBuilder() {
Class<? extends IBaseResource> resourceTypeClass =
myContext.getResourceDefinition(myResourceType).getImplementingClass();
@ -281,6 +284,7 @@ public class SearchTask implements Callable<Void> {
.execute(() -> doSaveSearch());
}
@SuppressWarnings("rawtypes")
private void saveUnsynced(final IResultIterator theResultIter) {
myTxService
.withRequest(myRequest)
@ -296,7 +300,7 @@ public class SearchTask implements Callable<Void> {
// Interceptor call: STORAGE_PREACCESS_RESOURCES
// This can be used to remove results from the search result details before
// the user has a chance to know that they were in the results
if (mySearchRuntimeDetails.getRequestDetails() != null && unsyncedPids.isEmpty() == false) {
if (mySearchRuntimeDetails.getRequestDetails() != null && !unsyncedPids.isEmpty()) {
JpaPreResourceAccessDetails accessDetails =
new JpaPreResourceAccessDetails(unsyncedPids, () -> newSearchBuilder());
HookParams params = new HookParams()
@ -332,10 +336,8 @@ public class SearchTask implements Callable<Void> {
mySyncedPids.addAll(unsyncedPids);
unsyncedPids.clear();
if (theResultIter.hasNext() == false) {
if (!theResultIter.hasNext()) {
int skippedCount = theResultIter.getSkippedCount();
int nonSkippedCount = theResultIter.getNonSkippedCount();
int totalFetched = skippedCount + myCountSavedThisPass + myCountBlockedThisPass;
ourLog.trace(
"MaxToFetch[{}] SkippedCount[{}] CountSavedThisPass[{}] CountSavedThisTotal[{}] AdditionalPrefetchRemaining[{}]",
myMaxResultsToFetch,
@ -344,16 +346,18 @@ public class SearchTask implements Callable<Void> {
myCountSavedTotal,
myAdditionalPrefetchThresholdsRemaining);
if (nonSkippedCount == 0
|| (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch)) {
if (isFinished(theResultIter)) {
// finished
ourLog.trace("Setting search status to FINISHED");
mySearch.setStatus(SearchStatusEnum.FINISHED);
mySearch.setTotalCount(myCountSavedTotal - countBlocked);
} else if (myAdditionalPrefetchThresholdsRemaining) {
// pass complete
ourLog.trace("Setting search status to PASSCMPLET");
mySearch.setStatus(SearchStatusEnum.PASSCMPLET);
mySearch.setSearchParameterMap(myParams);
} else {
// also finished
ourLog.trace("Setting search status to FINISHED");
mySearch.setStatus(SearchStatusEnum.FINISHED);
mySearch.setTotalCount(myCountSavedTotal - countBlocked);
@ -382,8 +386,34 @@ public class SearchTask implements Callable<Void> {
ourLog.trace("saveUnsynced() - post-commit");
}
@SuppressWarnings("rawtypes")
private boolean isFinished(final IResultIterator theResultIter) {
int skippedCount = theResultIter.getSkippedCount();
int nonSkippedCount = theResultIter.getNonSkippedCount();
int totalFetched = skippedCount + myCountSavedThisPass + myCountBlockedThisPass;
if (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch) {
// total fetched < max results to fetch -> we've exhausted the search
return true;
} else {
if (nonSkippedCount == 0) {
// no skipped resources in this query
if (myParams.getCount() != null) {
// count supplied
// if the count is > what we've fetched -> we've exhausted the query
return myParams.getCount() > totalFetched;
} else {
// legacy - we have no skipped resources - we are done
return true;
}
}
// skipped resources means we have more to fetch
return false;
}
}
public boolean isNotAborted() {
return myAbortRequested == false;
return !myAbortRequested;
}
public void markComplete() {
@ -517,6 +547,7 @@ public class SearchTask implements Callable<Void> {
* This method actually creates the database query to perform the
* search, and starts it.
*/
@SuppressWarnings({"rawtypes", "unchecked"})
private void doSearch() {
/*
* If the user has explicitly requested a _count, perform a
@ -531,32 +562,7 @@ public class SearchTask implements Callable<Void> {
: SearchParameterMapCalculator.isWantCount(myStorageSettings.getDefaultTotalMode());
if (myParamWantOnlyCount || myParamOrDefaultWantCount) {
ourLog.trace("Performing count");
ISearchBuilder sb = newSearchBuilder();
/*
* createCountQuery
* NB: (see createQuery below)
* Because FulltextSearchSvcImpl will (internally)
* mutate the myParams (searchmap),
* (specifically removing the _content and _text filters)
* we will have to clone those parameters here so that
* the "correct" params are used in createQuery below
*/
Long count = sb.createCountQuery(myParams.clone(), mySearch.getUuid(), myRequest, myRequestPartitionId);
ourLog.trace("Got count {}", count);
myTxService
.withRequest(myRequest)
.withRequestPartitionId(myRequestPartitionId)
.execute(() -> {
mySearch.setTotalCount(count.intValue());
if (myParamWantOnlyCount) {
mySearch.setStatus(SearchStatusEnum.FINISHED);
}
doSaveSearch();
});
doCountOnlyQuery(myParamWantOnlyCount);
if (myParamWantOnlyCount) {
return;
}
@ -573,12 +579,16 @@ public class SearchTask implements Callable<Void> {
*/
int currentlyLoaded = defaultIfNull(mySearch.getNumFound(), 0);
int minWanted = 0;
// if no count is provided,
// we only use the values in SearchPreFetchThresholds
// but if there is a count...
if (myParams.getCount() != null) {
minWanted = myParams.getCount() + 1; // Always fetch one past this page, so we know if there is a next page.
minWanted = Math.min(minWanted, myPagingProvider.getMaximumPageSize());
minWanted = Math.min(myParams.getCount(), myPagingProvider.getMaximumPageSize());
minWanted += currentlyLoaded;
}
// iterate through the search thresholds
for (Iterator<Integer> iter =
myStorageSettings.getSearchPreFetchThresholds().iterator();
iter.hasNext(); ) {
@ -590,8 +600,11 @@ public class SearchTask implements Callable<Void> {
if (next == -1) {
sb.setMaxResultsToFetch(null);
} else {
// we want at least 1 more than our requested amount
// so we know that there are other results
// (in case we get the exact amount back)
myMaxResultsToFetch = Math.max(next, minWanted);
sb.setMaxResultsToFetch(myMaxResultsToFetch);
sb.setMaxResultsToFetch(myMaxResultsToFetch + 1);
}
if (iter.hasNext()) {
@ -633,6 +646,7 @@ public class SearchTask implements Callable<Void> {
*/
try (IResultIterator<JpaPid> resultIterator =
sb.createQuery(myParams, mySearchRuntimeDetails, myRequest, myRequestPartitionId)) {
// resultIterator is SearchBuilder.QueryIterator
assert (resultIterator != null);
/*
@ -678,4 +692,38 @@ public class SearchTask implements Callable<Void> {
throw new InternalErrorException(Msg.code(1166) + e);
}
}
/**
* Runs the query, but only to obtain the count.
* @param theParamWantOnlyCount whether only a count (and no result set) was requested
*/
private void doCountOnlyQuery(boolean theParamWantOnlyCount) {
ourLog.trace("Performing count");
@SuppressWarnings("rawtypes")
ISearchBuilder sb = newSearchBuilder();
/*
* createCountQuery
* NB: (see createQuery below)
* Because FulltextSearchSvcImpl will (internally)
* mutate the myParams (searchmap),
* (specifically removing the _content and _text filters)
* we will have to clone those parameters here so that
* the "correct" params are used in createQuery below
*/
Long count = sb.createCountQuery(myParams.clone(), mySearch.getUuid(), myRequest, myRequestPartitionId);
ourLog.trace("Got count {}", count);
myTxService
.withRequest(myRequest)
.withRequestPartitionId(myRequestPartitionId)
.execute(() -> {
mySearch.setTotalCount(count.intValue());
if (theParamWantOnlyCount) {
mySearch.setStatus(SearchStatusEnum.FINISHED);
}
doSaveSearch();
});
}
}

@ -93,7 +93,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
List<String> returnedIdValues = toUnqualifiedVersionlessIdValues(resources);
assertEquals(myObservationIds.subList(0, 10), returnedIdValues);
assertEquals(1, hitCount.get());
assertEquals(myObservationIds.subList(0, 20), interceptedResourceIds);
assertEquals(myObservationIds.subList(0, 21), interceptedResourceIds);
// Fetch the next 30 (do cross a fetch boundary)
outcome = myPagingProvider.retrieveResultList(mySrd, outcome.getUuid());
@ -125,7 +125,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
List<String> returnedIdValues = toUnqualifiedVersionlessIdValues(resources);
assertEquals(myObservationIdsEvenOnly.subList(0, 10), returnedIdValues);
assertEquals(1, hitCount.get());
assertEquals(myObservationIds.subList(0, 20), interceptedResourceIds, "Wrong response from " + outcome.getClass());
assertEquals(myObservationIds.subList(0, 21), interceptedResourceIds, "Wrong response from " + outcome.getClass());
// Fetch the next 30 (do cross a fetch boundary)
String searchId = outcome.getUuid();

@ -26,12 +26,12 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.in;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@ -178,7 +178,9 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4T
IIdType id1 = createPatient1();
assertNotNull(id1);
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
assertEquals(0, myCaptureQueriesListener.countSelectQueries(),
String.join(",", "\n" + myCaptureQueriesListener.getSelectQueries().stream().map(q -> q.getThreadName()).collect(Collectors.toList()))
);
assertEquals(12, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

@ -421,7 +421,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
runInTransaction(() -> {
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
assertEquals(20, search.getNumFound());
assertEquals(21, search.getNumFound());
assertEquals(search.getNumFound(), mySearchResultDao.count());
assertNull(search.getTotalCount());
assertEquals(1, search.getVersion().intValue());
@ -462,7 +462,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
runInTransaction(() -> {
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
assertEquals(SearchStatusEnum.PASSCMPLET, search.getStatus());
assertEquals(50, search.getNumFound());
assertEquals(51, search.getNumFound());
assertEquals(search.getNumFound(), mySearchResultDao.count());
assertNull(search.getTotalCount());
assertEquals(3, search.getVersion().intValue());
@ -501,9 +501,9 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
*/
runInTransaction(() -> {
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
assertEquals(190, search.getNumFound());
assertEquals(191, search.getNumFound());
assertEquals(search.getNumFound(), mySearchResultDao.count());
assertEquals(190, search.getTotalCount().intValue());
assertEquals(191, search.getTotalCount().intValue());
assertEquals(5, search.getVersion().intValue());
assertEquals(SearchStatusEnum.FINISHED, search.getStatus());
});
@ -513,10 +513,10 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
*/
ids = toUnqualifiedVersionlessIdValues(results, 180, 200, false);
assertEquals(10, ids.size());
assertEquals(11, ids.size());
assertEquals("Patient/PT00180", ids.get(0));
assertEquals("Patient/PT00189", ids.get(9));
assertEquals(190, myDatabaseBackedPagingProvider.retrieveResultList(null, uuid).size().intValue());
assertEquals(191, myDatabaseBackedPagingProvider.retrieveResultList(null, uuid).size().intValue());
}
@ -554,7 +554,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
});
runInTransaction(() -> {
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
assertEquals(50, search.getNumFound());
assertEquals(51, search.getNumFound());
assertEquals(search.getNumFound(), mySearchResultDao.count());
assertEquals(null, search.getTotalCount());
assertEquals(SearchStatusEnum.PASSCMPLET, search.getStatus());
@ -589,10 +589,10 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
await().until(() -> runInTransaction(() -> {
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
return search.getNumFound();
}), equalTo(20));
}), equalTo(21));
runInTransaction(() -> {
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
assertEquals(20, search.getNumFound());
assertEquals(21, search.getNumFound());
assertEquals(search.getNumFound(), mySearchResultDao.count());
assertNull(search.getTotalCount());
assertEquals(1, search.getVersion().intValue());
@ -649,14 +649,14 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
*/
waitForSize(
20,
21,
10000,
() -> runInTransaction(() -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")).getNumFound()),
() -> "Wanted 20: " + runInTransaction(() -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")).toString()));
runInTransaction(() -> {
Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
assertEquals(20, search.getNumFound());
assertEquals(21, search.getNumFound());
assertEquals(search.getNumFound(), mySearchResultDao.count());
assertNull(search.getTotalCount());
assertEquals(1, search.getVersion().intValue());
@ -1172,12 +1172,12 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(4, myCaptureQueriesListener.countSelectQueries());
// first prefetch is 50+1
assertEquals(51, myCaptureQueriesListener.logInsertQueries());
assertEquals(52, myCaptureQueriesListener.logInsertQueries());
assertEquals(1, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(51, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(52, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

@ -0,0 +1,191 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.BasePagingProvider;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.util.BundleUtil;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.hl7.fhir.instance.model.api.IBaseBundle.LINK_NEXT;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@SuppressWarnings("Duplicates")
public class PatientEverythingPaginationR4Test extends BaseResourceProviderR4Test {
private int myOriginalServerDefaultPageSize;
@Autowired
JpaStorageSettings myStorageSettings;
@BeforeEach
public void beforeDisableResultReuse() {
myStorageSettings.setReuseCachedSearchResultsForMillis(null);
}
@Override
@BeforeEach
public void before() throws Exception {
super.before();
myFhirContext.setParserErrorHandler(new StrictErrorHandler());
myStorageSettings.setAllowMultipleDelete(true);
myOriginalServerDefaultPageSize = myServer.getDefaultPageSize();
myServer.setDefaultPageSize(50);
}
@Override
@AfterEach
public void after() throws Exception {
super.after();
myStorageSettings.setReuseCachedSearchResultsForMillis(new JpaStorageSettings().getReuseCachedSearchResultsForMillis());
myServer.setDefaultPageSize(myOriginalServerDefaultPageSize);
}
/**
* Built to reproduce <a href="https://gitlab.com/simpatico.ai/cdr/-/issues/4940">this issue</a>
* Note that the issue is not gateway related; it is a plain server issue.
*/
@Test
public void testEverythingPaginatesThroughAllPatients_whenCountIsEqualToMaxPageSize() throws IOException {
// setup
int totalPatients = 54;
createPatients(totalPatients);
String url = myServerBase + "/Patient/$everything?_format=json&_count=" + BasePagingProvider.DEFAULT_MAX_PAGE_SIZE;
// test
Bundle bundle = fetchBundle(url);
// first page
List<Patient> patientsFirstPage = BundleUtil.toListOfResourcesOfType(myFhirContext, bundle, Patient.class);
assertEquals(50, patientsFirstPage.size());
String nextUrl = BundleUtil.getLinkUrlOfType(myFhirContext, bundle, LINK_NEXT);
// 2nd/last page
assertNotNull(nextUrl);
Bundle page2 = fetchBundle(nextUrl);
assertNotNull(page2);
List<Patient> patientsPage2 = BundleUtil.toListOfResourcesOfType(myFhirContext, page2, Patient.class);
assertEquals(4, patientsPage2.size());
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testEverythingTypeOperationPagination_withDifferentPrefetchThresholds_coverageTest(boolean theProvideCountBool) throws IOException {
// setup
List<Integer> previousPrefetchThreshold = myStorageSettings.getSearchPreFetchThresholds();
// other tests may be resetting this
// so we'll set it
int pageSize = myPagingProvider.getDefaultPageSize();
int serverPageSize = myServer.getDefaultPageSize();
try {
int defaultPageSize = theProvideCountBool ? 50 : 10;
// set our prefetch thresholds to ensure we run out of them
List<Integer> prefetchThreshold = Arrays.asList(10, 50, -1);
myStorageSettings.setSearchPreFetchThresholds(prefetchThreshold);
// the number of patients to create
int total = 154;
String nextUrl;
createPatients(total);
Set<String> ids = new HashSet<>();
String url = myServerBase + "/Patient/$everything?_format=json";
if (theProvideCountBool) {
url += "&_count=" + BasePagingProvider.DEFAULT_MAX_PAGE_SIZE;
}
myPagingProvider.setDefaultPageSize(defaultPageSize);
myServer.setDefaultPageSize(defaultPageSize);
// test
Bundle bundle = fetchBundle(url);
// first page
List<Patient> patientsPage = BundleUtil.toListOfResourcesOfType(myFhirContext, bundle, Patient.class);
assertEquals(defaultPageSize, patientsPage.size());
for (Patient p : patientsPage) {
assertTrue(ids.add(p.getId()));
}
nextUrl = BundleUtil.getLinkUrlOfType(myFhirContext, bundle, LINK_NEXT);
assertNotNull(nextUrl);
// all future pages
do {
bundle = fetchBundle(nextUrl);
assertNotNull(bundle);
patientsPage = BundleUtil.toListOfResourcesOfType(myFhirContext, bundle, Patient.class);
for (Patient p : patientsPage) {
assertTrue(ids.add(p.getId()));
}
nextUrl = BundleUtil.getLinkUrlOfType(myFhirContext, bundle, LINK_NEXT);
if (nextUrl != null) {
assertEquals(defaultPageSize, patientsPage.size());
} else {
assertEquals(4, patientsPage.size());
}
} while (nextUrl != null);
// ensure we found everything
assertEquals(total, ids.size());
} finally {
// set it back, just in case
myStorageSettings.setSearchPreFetchThresholds(previousPrefetchThreshold);
myPagingProvider.setDefaultPageSize(pageSize);
myServer.setDefaultPageSize(serverPageSize);
}
}
private void createPatients(int theCount) {
for (int i = 0; i < theCount; i++) {
Patient patient = new Patient();
patient.addName().setFamily("lastn").addGiven("name");
myPatientDao.create(patient, new SystemRequestDetails()).getId().toUnqualifiedVersionless();
}
}
private Bundle fetchBundle(String theUrl) throws IOException {
Bundle bundle;
HttpGet get = new HttpGet(theUrl);
CloseableHttpResponse resp = ourHttpClient.execute(get);
try {
assertEquals(EncodingEnum.JSON.getResourceContentTypeNonLegacy(), resp.getFirstHeader(Constants.HEADER_CONTENT_TYPE).getValue().replaceAll(";.*", ""));
bundle = EncodingEnum.JSON.newParser(myFhirContext).parseResource(Bundle.class, IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8));
} finally {
IOUtils.closeQuietly(resp);
}
return bundle;
}
}