diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5198-fixing-everything-search-paging-issues.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5198-fixing-everything-search-paging-issues.yaml new file mode 100644 index 00000000000..7379d14ff29 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5198-fixing-everything-search-paging-issues.yaml @@ -0,0 +1,7 @@ +--- +type: fix +issue: 5198 +title: "Resolved an issue with type-everything search operations (eg, /Patient/$everything), + where not all page results were being returned if _count was specified to be + the same value as the maximum page size to fetch. +" diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index 5e48a02c6ad..13c636d585f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -55,6 +55,7 @@ import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; import ca.uhn.fhir.jpa.search.SearchConstants; +import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor; @@ -116,7 +117,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -178,7 +178,6 @@ public class SearchBuilder implements ISearchBuilder { @PersistenceContext(type = 
PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; - private List myAlsoIncludePids; private CriteriaBuilder myCriteriaBuilder; private SearchParameterMap myParams; private String mySearchUuid; @@ -449,9 +448,8 @@ public class SearchBuilder implements ISearchBuilder { } } else { // do everything in the database. - Optional query = createChunkedQuery( - theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null); - query.ifPresent(queries::add); + createChunkedQuery( + theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null, queries); } return queries; @@ -541,9 +539,7 @@ public class SearchBuilder implements ISearchBuilder { if (thePids.size() < getMaximumPageSize()) { normalizeIdListForLastNInClause(thePids); } - Optional query = - createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids); - query.ifPresent(t -> theQueries.add(t)); + createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids, theQueries); } /** @@ -551,7 +547,8 @@ public class SearchBuilder implements ISearchBuilder { * * @param theTargetPids */ - private void extractTargetPidsFromIdParams(HashSet theTargetPids) { + private void extractTargetPidsFromIdParams( + HashSet theTargetPids, List theSearchQueryExecutors) { // get all the IQueryParameterType objects // for _id -> these should all be StringParam values HashSet ids = new HashSet<>(); @@ -575,25 +572,26 @@ public class SearchBuilder implements ISearchBuilder { // this will throw if an id is not found Map idToPid = myIdHelperService.resolveResourcePersistentIds( myRequestPartitionId, myResourceName, new ArrayList<>(ids)); - if (myAlsoIncludePids == null) { - myAlsoIncludePids = new ArrayList<>(); - } // add the pids to targetPids for (JpaPid pid : idToPid.values()) { - myAlsoIncludePids.add(pid); theTargetPids.add(pid.getId()); } + + // add the target pids to our executors as the first + // results 
iterator to go through + theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(theTargetPids))); } - private Optional createChunkedQuery( + private void createChunkedQuery( SearchParameterMap theParams, SortSpec sort, Integer theOffset, Integer theMaximumResults, boolean theCountOnlyFlag, RequestDetails theRequest, - List thePidList) { + List thePidList, + List theSearchQueryExecutors) { String sqlBuilderResourceName = myParams.getEverythingMode() == null ? myResourceName : null; SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( myContext, @@ -627,7 +625,9 @@ public class SearchBuilder implements ISearchBuilder { if (myParams.getEverythingMode() != null) { HashSet targetPids = new HashSet<>(); if (myParams.get(IAnyResource.SP_RES_ID) != null) { - extractTargetPidsFromIdParams(targetPids); + // will add an initial search executor for + // _id params + extractTargetPidsFromIdParams(targetPids, theSearchQueryExecutors); } else { // For Everything queries, we make the query root by the ResourceLink table, since this query // is basically a reverse-include search. 
For type/Everything (as opposed to instance/Everything) @@ -645,11 +645,11 @@ public class SearchBuilder implements ISearchBuilder { GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch); String sql = allTargetsSql.getSql(); Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); + List output = jdbcTemplate.query(sql, args, new SingleColumnRowMapper<>(Long.class)); - if (myAlsoIncludePids == null) { - myAlsoIncludePids = new ArrayList<>(output.size()); - } - myAlsoIncludePids.addAll(JpaPid.fromLongList(output)); + + // we add a search executor to fetch unlinked patients first + theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output)); } List typeSourceResources = new ArrayList<>(); @@ -747,12 +747,11 @@ public class SearchBuilder implements ISearchBuilder { * Now perform the search */ GeneratedSql generatedSql = sqlBuilder.generate(theOffset, myMaxResultsToFetch); - if (generatedSql.isMatchNothing()) { - return Optional.empty(); + if (!generatedSql.isMatchNothing()) { + SearchQueryExecutor executor = + mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, myMaxResultsToFetch); + theSearchQueryExecutors.add(executor); } - - SearchQueryExecutor executor = mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, myMaxResultsToFetch); - return Optional.of(executor); } private Collection extractTypeSourceResourcesFromParams() { @@ -1925,11 +1924,35 @@ public class SearchBuilder implements ISearchBuilder { private final Integer myOffset; private boolean myFirst = true; private IncludesIterator myIncludesIterator; + /** + * The next JpaPid value of the next result in this query. 
+ * Will not be null if fetched using getNext() + */ private JpaPid myNext; + /** + * The current query result iterator running sql and supplying PIDs + * @see #myQueryList + */ private ISearchQueryExecutor myResultsIterator; + private boolean myFetchIncludesForEverythingOperation; + /** + * The count of resources skipped because they were seen in earlier results + */ private int mySkipCount = 0; + /** + * The count of resources that are new in this search + * (ie, not cached in previous searches) + */ private int myNonSkipCount = 0; + + /** + * The list of queries to use to find all results. + * Normal JPA queries will normally have a single entry. + * Queries that involve Hibernate Search/Elastisearch may have + * multiple queries because of chunking. + * The $everything operation also jams some extra results in. + */ private List myQueryList = new ArrayList<>(); private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { @@ -1967,109 +1990,87 @@ public class SearchBuilder implements ISearchBuilder { } } - // assigns the results iterator + /* + * assigns the results iterator + * and populates the myQueryList. + */ initializeIteratorQuery(myOffset, myMaxResultsToFetch); + } - if (myAlsoIncludePids == null) { - myAlsoIncludePids = new ArrayList<>(); + if (myNext == null) { + // no next means we need a new query (if one is available) + while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { + // Update iterator with next chunk if necessary. 
+ if (!myResultsIterator.hasNext()) { + retrieveNextIteratorQuery(); + + // if our new results iterator is also empty + // we're done here + if (!myResultsIterator.hasNext()) { + break; + } + } + + Long nextLong = myResultsIterator.next(); + if (myHavePerfTraceFoundIdHook) { + HookParams params = new HookParams() + .add(Integer.class, System.identityHashCode(this)) + .add(Object.class, nextLong); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, + myRequest, + Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, + params); + } + + if (nextLong != null) { + JpaPid next = JpaPid.fromId(nextLong); + if (myPidSet.add(next)) { + myNext = next; + myNonSkipCount++; + break; + } else { + mySkipCount++; + } + } + + if (!myResultsIterator.hasNext()) { + if (myMaxResultsToFetch != null && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) { + if (mySkipCount > 0 && myNonSkipCount == 0) { + + sendProcessingMsgAndFirePerformanceHook(); + + myMaxResultsToFetch += 1000; + initializeIteratorQuery(myOffset, myMaxResultsToFetch); + } + } + } } } if (myNext == null) { - for (Iterator myPreResultsIterator = myAlsoIncludePids.iterator(); - myPreResultsIterator.hasNext(); ) { - JpaPid next = myPreResultsIterator.next(); - if (next != null) - if (myPidSet.add(next)) { - myNext = next; - break; - } + // if we got here, it means the current PjaPid has already been processed + // and we will decide (here) if we need to fetch related resources recursively + if (myFetchIncludesForEverythingOperation) { + myIncludesIterator = new IncludesIterator(myPidSet, myRequest); + myFetchIncludesForEverythingOperation = false; } - - if (myNext == null) { - while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { - // Update iterator with next chunk if necessary. 
- if (!myResultsIterator.hasNext()) { - retrieveNextIteratorQuery(); - } - - Long nextLong = myResultsIterator.next(); - if (myHavePerfTraceFoundIdHook) { - HookParams params = new HookParams() - .add(Integer.class, System.identityHashCode(this)) - .add(Object.class, nextLong); - CompositeInterceptorBroadcaster.doCallHooks( - myInterceptorBroadcaster, - myRequest, - Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, - params); - } - - if (nextLong != null) { - JpaPid next = JpaPid.fromId(nextLong); + if (myIncludesIterator != null) { + while (myIncludesIterator.hasNext()) { + JpaPid next = myIncludesIterator.next(); + if (next != null) if (myPidSet.add(next)) { myNext = next; - myNonSkipCount++; break; - } else { - mySkipCount++; } - } - - if (!myResultsIterator.hasNext()) { - if (myMaxResultsToFetch != null - && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) { - if (mySkipCount > 0 && myNonSkipCount == 0) { - - StorageProcessingMessage message = new StorageProcessingMessage(); - String msg = "Pass completed with no matching results seeking rows " - + myPidSet.size() + "-" + mySkipCount - + ". This indicates an inefficient query! 
Retrying with new max count of " - + myMaxResultsToFetch; - ourLog.warn(msg); - message.setMessage(msg); - HookParams params = new HookParams() - .add(RequestDetails.class, myRequest) - .addIfMatchesType(ServletRequestDetails.class, myRequest) - .add(StorageProcessingMessage.class, message); - CompositeInterceptorBroadcaster.doCallHooks( - myInterceptorBroadcaster, - myRequest, - Pointcut.JPA_PERFTRACE_WARNING, - params); - - myMaxResultsToFetch += 1000; - initializeIteratorQuery(myOffset, myMaxResultsToFetch); - } - } - } } - } - - if (myNext == null) { - // if we got here, it means the current PjaPid has already been processed - // and we will decide (here) if we need to fetch related resources recursively - if (myFetchIncludesForEverythingOperation) { - myIncludesIterator = new IncludesIterator(myPidSet, myRequest); - myFetchIncludesForEverythingOperation = false; - } - if (myIncludesIterator != null) { - while (myIncludesIterator.hasNext()) { - JpaPid next = myIncludesIterator.next(); - if (next != null) - if (myPidSet.add(next)) { - myNext = next; - break; - } - } - if (myNext == null) { - myNext = NO_MORE; - } - } else { + if (myNext == null) { myNext = NO_MORE; } + } else { + myNext = NO_MORE; } - } // if we need to fetch the next result + } mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); @@ -2100,6 +2101,22 @@ public class SearchBuilder implements ISearchBuilder { } } + private void sendProcessingMsgAndFirePerformanceHook() { + StorageProcessingMessage message = new StorageProcessingMessage(); + String msg = "Pass completed with no matching results seeking rows " + + myPidSet.size() + "-" + mySkipCount + + ". This indicates an inefficient query! 
Retrying with new max count of " + + myMaxResultsToFetch; + ourLog.warn(msg); + message.setMessage(msg); + HookParams params = new HookParams() + .add(RequestDetails.class, myRequest) + .addIfMatchesType(ServletRequestDetails.class, myRequest) + .add(StorageProcessingMessage.class, message); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_WARNING, params); + } + private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { if (myQueryList.isEmpty()) { // Capture times for Lucene/Elasticsearch queries as well diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java index f099ac9d668..e1a6ddacf25 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.search.builder; import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; import org.apache.commons.lang3.Validate; import java.util.Iterator; @@ -57,41 +58,6 @@ public class SearchQueryExecutors { return new ResolvedSearchQueryExecutor(rawPids); } - /** - * Adapt bare Iterator to our internal query interface. 
- */ - static class ResolvedSearchQueryExecutor implements ISearchQueryExecutor { - private final Iterator myIterator; - - ResolvedSearchQueryExecutor(Iterable theIterable) { - this(theIterable.iterator()); - } - - ResolvedSearchQueryExecutor(Iterator theIterator) { - myIterator = theIterator; - } - - @Nonnull - public static ResolvedSearchQueryExecutor from(List rawPids) { - return new ResolvedSearchQueryExecutor(rawPids); - } - - @Override - public boolean hasNext() { - return myIterator.hasNext(); - } - - @Override - public Long next() { - return myIterator.next(); - } - - @Override - public void close() { - // empty - } - } - public static ISearchQueryExecutor from(Iterator theIterator) { return new JpaPidQueryAdaptor(theIterator); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java new file mode 100644 index 00000000000..44fa3993f63 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/ResolvedSearchQueryExecutor.java @@ -0,0 +1,39 @@ +package ca.uhn.fhir.jpa.search.builder.models; + +import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor; + +import java.util.Iterator; +import java.util.List; +import javax.annotation.Nonnull; + +public class ResolvedSearchQueryExecutor implements ISearchQueryExecutor { + private final Iterator myIterator; + + public ResolvedSearchQueryExecutor(Iterable theIterable) { + this(theIterable.iterator()); + } + + public ResolvedSearchQueryExecutor(Iterator theIterator) { + myIterator = theIterator; + } + + @Nonnull + public static ResolvedSearchQueryExecutor from(List rawPids) { + return new ResolvedSearchQueryExecutor(rawPids); + } + + @Override + public boolean hasNext() { + return myIterator.hasNext(); + } + + @Override + public Long next() { + return myIterator.next(); + } + + @Override + public void 
close() { + // empty + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java index 64b39e376d4..9396f7d9542 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java @@ -122,9 +122,11 @@ public class SearchTask implements Callable { private boolean myAdditionalPrefetchThresholdsRemaining; private List myPreviouslyAddedResourcePids; private Integer myMaxResultsToFetch; + /** * Constructor */ + @SuppressWarnings({"unchecked", "rawtypes"}) public SearchTask( SearchTaskParameters theCreationParams, HapiTransactionService theManagedTxManager, @@ -198,6 +200,7 @@ public class SearchTask implements Callable { myCountSavedTotal = myPreviouslyAddedResourcePids.size(); } + @SuppressWarnings("rawtypes") private ISearchBuilder newSearchBuilder() { Class resourceTypeClass = myContext.getResourceDefinition(myResourceType).getImplementingClass(); @@ -281,6 +284,7 @@ public class SearchTask implements Callable { .execute(() -> doSaveSearch()); } + @SuppressWarnings("rawtypes") private void saveUnsynced(final IResultIterator theResultIter) { myTxService .withRequest(myRequest) @@ -296,7 +300,7 @@ public class SearchTask implements Callable { // Interceptor call: STORAGE_PREACCESS_RESOURCES // This can be used to remove results from the search result details before // the user has a chance to know that they were in the results - if (mySearchRuntimeDetails.getRequestDetails() != null && unsyncedPids.isEmpty() == false) { + if (mySearchRuntimeDetails.getRequestDetails() != null && !unsyncedPids.isEmpty()) { JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(unsyncedPids, () -> newSearchBuilder()); HookParams params = new HookParams() @@ -332,10 +336,8 @@ public class 
SearchTask implements Callable { mySyncedPids.addAll(unsyncedPids); unsyncedPids.clear(); - if (theResultIter.hasNext() == false) { + if (!theResultIter.hasNext()) { int skippedCount = theResultIter.getSkippedCount(); - int nonSkippedCount = theResultIter.getNonSkippedCount(); - int totalFetched = skippedCount + myCountSavedThisPass + myCountBlockedThisPass; ourLog.trace( "MaxToFetch[{}] SkippedCount[{}] CountSavedThisPass[{}] CountSavedThisTotal[{}] AdditionalPrefetchRemaining[{}]", myMaxResultsToFetch, @@ -344,16 +346,18 @@ public class SearchTask implements Callable { myCountSavedTotal, myAdditionalPrefetchThresholdsRemaining); - if (nonSkippedCount == 0 - || (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch)) { + if (isFinished(theResultIter)) { + // finished ourLog.trace("Setting search status to FINISHED"); mySearch.setStatus(SearchStatusEnum.FINISHED); mySearch.setTotalCount(myCountSavedTotal - countBlocked); } else if (myAdditionalPrefetchThresholdsRemaining) { + // pass complete ourLog.trace("Setting search status to PASSCMPLET"); mySearch.setStatus(SearchStatusEnum.PASSCMPLET); mySearch.setSearchParameterMap(myParams); } else { + // also finished ourLog.trace("Setting search status to FINISHED"); mySearch.setStatus(SearchStatusEnum.FINISHED); mySearch.setTotalCount(myCountSavedTotal - countBlocked); @@ -382,8 +386,34 @@ public class SearchTask implements Callable { ourLog.trace("saveUnsynced() - post-commit"); } + @SuppressWarnings("rawtypes") + private boolean isFinished(final IResultIterator theResultIter) { + int skippedCount = theResultIter.getSkippedCount(); + int nonSkippedCount = theResultIter.getNonSkippedCount(); + int totalFetched = skippedCount + myCountSavedThisPass + myCountBlockedThisPass; + + if (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch) { + // total fetched < max results to fetch -> we've exhausted the search + return true; + } else { + if (nonSkippedCount == 0) { + // no skipped resources in this 
query + if (myParams.getCount() != null) { + // count supplied + // if the count is > what we've fetched -> we've exhausted the query + return myParams.getCount() > totalFetched; + } else { + // legacy - we have no skipped resources - we are done + return true; + } + } + // skipped resources means we have more to fetch + return false; + } + } + public boolean isNotAborted() { - return myAbortRequested == false; + return !myAbortRequested; } public void markComplete() { @@ -517,6 +547,7 @@ public class SearchTask implements Callable { * This method actually creates the database query to perform the * search, and starts it. */ + @SuppressWarnings({"rawtypes", "unchecked"}) private void doSearch() { /* * If the user has explicitly requested a _count, perform a @@ -531,32 +562,7 @@ public class SearchTask implements Callable { : SearchParameterMapCalculator.isWantCount(myStorageSettings.getDefaultTotalMode()); if (myParamWantOnlyCount || myParamOrDefaultWantCount) { - ourLog.trace("Performing count"); - ISearchBuilder sb = newSearchBuilder(); - - /* - * createCountQuery - * NB: (see createQuery below) - * Because FulltextSearchSvcImpl will (internally) - * mutate the myParams (searchmap), - * (specifically removing the _content and _text filters) - * we will have to clone those parameters here so that - * the "correct" params are used in createQuery below - */ - Long count = sb.createCountQuery(myParams.clone(), mySearch.getUuid(), myRequest, myRequestPartitionId); - - ourLog.trace("Got count {}", count); - - myTxService - .withRequest(myRequest) - .withRequestPartitionId(myRequestPartitionId) - .execute(() -> { - mySearch.setTotalCount(count.intValue()); - if (myParamWantOnlyCount) { - mySearch.setStatus(SearchStatusEnum.FINISHED); - } - doSaveSearch(); - }); + doCountOnlyQuery(myParamWantOnlyCount); if (myParamWantOnlyCount) { return; } @@ -573,12 +579,16 @@ public class SearchTask implements Callable { */ int currentlyLoaded = defaultIfNull(mySearch.getNumFound(), 
0); int minWanted = 0; + + // if no count is provided, + // we only use the values in SearchPreFetchThresholds + // but if there is a count... if (myParams.getCount() != null) { - minWanted = myParams.getCount() + 1; // Always fetch one past this page, so we know if there is a next page. - minWanted = Math.min(minWanted, myPagingProvider.getMaximumPageSize()); + minWanted = Math.min(myParams.getCount(), myPagingProvider.getMaximumPageSize()); minWanted += currentlyLoaded; } + // iterate through the search thresholds for (Iterator iter = myStorageSettings.getSearchPreFetchThresholds().iterator(); iter.hasNext(); ) { @@ -590,8 +600,11 @@ public class SearchTask implements Callable { if (next == -1) { sb.setMaxResultsToFetch(null); } else { + // we want at least 1 more than our requested amount + // so we know that there are other results + // (in case we get the exact amount back) myMaxResultsToFetch = Math.max(next, minWanted); - sb.setMaxResultsToFetch(myMaxResultsToFetch); + sb.setMaxResultsToFetch(myMaxResultsToFetch + 1); } if (iter.hasNext()) { @@ -633,6 +646,7 @@ public class SearchTask implements Callable { */ try (IResultIterator resultIterator = sb.createQuery(myParams, mySearchRuntimeDetails, myRequest, myRequestPartitionId)) { + // resultIterator is SearchBuilder.QueryIterator assert (resultIterator != null); /* @@ -678,4 +692,38 @@ public class SearchTask implements Callable { throw new InternalErrorException(Msg.code(1166) + e); } } + + /** + * Does the query but only for the count. 
+ * @param theParamWantOnlyCount - if count query is wanted only + */ + private void doCountOnlyQuery(boolean theParamWantOnlyCount) { + ourLog.trace("Performing count"); + @SuppressWarnings("rawtypes") + ISearchBuilder sb = newSearchBuilder(); + + /* + * createCountQuery + * NB: (see createQuery below) + * Because FulltextSearchSvcImpl will (internally) + * mutate the myParams (searchmap), + * (specifically removing the _content and _text filters) + * we will have to clone those parameters here so that + * the "correct" params are used in createQuery below + */ + Long count = sb.createCountQuery(myParams.clone(), mySearch.getUuid(), myRequest, myRequestPartitionId); + + ourLog.trace("Got count {}", count); + + myTxService + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .execute(() -> { + mySearch.setTotalCount(count.intValue()); + if (theParamWantOnlyCount) { + mySearch.setStatus(SearchStatusEnum.FINISHED); + } + doSaveSearch(); + }); + } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/ConsentEventsDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/ConsentEventsDaoR4Test.java index 1bd202ff9b6..0c1e49496f6 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/ConsentEventsDaoR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/ConsentEventsDaoR4Test.java @@ -93,7 +93,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest { List returnedIdValues = toUnqualifiedVersionlessIdValues(resources); assertEquals(myObservationIds.subList(0, 10), returnedIdValues); assertEquals(1, hitCount.get()); - assertEquals(myObservationIds.subList(0, 20), interceptedResourceIds); + assertEquals(myObservationIds.subList(0, 21), interceptedResourceIds); // Fetch the next 30 (do cross a fetch boundary) outcome = myPagingProvider.retrieveResultList(mySrd, outcome.getUuid()); @@ -125,7 +125,7 @@ public class ConsentEventsDaoR4Test 
extends BaseJpaR4SystemTest { List returnedIdValues = toUnqualifiedVersionlessIdValues(resources); assertEquals(myObservationIdsEvenOnly.subList(0, 10), returnedIdValues); assertEquals(1, hitCount.get()); - assertEquals(myObservationIds.subList(0, 20), interceptedResourceIds, "Wrong response from " + outcome.getClass()); + assertEquals(myObservationIds.subList(0, 21), interceptedResourceIds, "Wrong response from " + outcome.getClass()); // Fetch the next 30 (do cross a fetch boundary) String searchId = outcome.getUuid(); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboNonUniqueParamTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboNonUniqueParamTest.java index 9e985d8fa04..6602dcc8f3e 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboNonUniqueParamTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ComboNonUniqueParamTest.java @@ -26,12 +26,12 @@ import org.springframework.beans.factory.annotation.Autowired; import java.util.Comparator; import java.util.List; +import java.util.stream.Collectors; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.in; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -178,7 +178,9 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4T IIdType id1 = createPatient1(); assertNotNull(id1); - assertEquals(0, myCaptureQueriesListener.countSelectQueries()); + assertEquals(0, myCaptureQueriesListener.countSelectQueries(), + String.join(",", "\n" + 
myCaptureQueriesListener.getSelectQueries().stream().map(q -> q.getThreadName()).collect(Collectors.toList())) + ); assertEquals(12, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java index 67fb685cfcd..44828cd39ab 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java @@ -421,7 +421,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { runInTransaction(() -> { Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")); - assertEquals(20, search.getNumFound()); + assertEquals(21, search.getNumFound()); assertEquals(search.getNumFound(), mySearchResultDao.count()); assertNull(search.getTotalCount()); assertEquals(1, search.getVersion().intValue()); @@ -462,7 +462,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { runInTransaction(() -> { Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")); assertEquals(SearchStatusEnum.PASSCMPLET, search.getStatus()); - assertEquals(50, search.getNumFound()); + assertEquals(51, search.getNumFound()); assertEquals(search.getNumFound(), mySearchResultDao.count()); assertNull(search.getTotalCount()); assertEquals(3, search.getVersion().intValue()); @@ -501,9 +501,9 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { */ runInTransaction(() -> { Search search = 
mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")); - assertEquals(190, search.getNumFound()); + assertEquals(191, search.getNumFound()); assertEquals(search.getNumFound(), mySearchResultDao.count()); - assertEquals(190, search.getTotalCount().intValue()); + assertEquals(191, search.getTotalCount().intValue()); assertEquals(5, search.getVersion().intValue()); assertEquals(SearchStatusEnum.FINISHED, search.getStatus()); }); @@ -513,10 +513,10 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { */ ids = toUnqualifiedVersionlessIdValues(results, 180, 200, false); - assertEquals(10, ids.size()); + assertEquals(11, ids.size()); assertEquals("Patient/PT00180", ids.get(0)); assertEquals("Patient/PT00189", ids.get(9)); - assertEquals(190, myDatabaseBackedPagingProvider.retrieveResultList(null, uuid).size().intValue()); + assertEquals(191, myDatabaseBackedPagingProvider.retrieveResultList(null, uuid).size().intValue()); } @@ -554,7 +554,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { }); runInTransaction(() -> { Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")); - assertEquals(50, search.getNumFound()); + assertEquals(51, search.getNumFound()); assertEquals(search.getNumFound(), mySearchResultDao.count()); assertEquals(null, search.getTotalCount()); assertEquals(SearchStatusEnum.PASSCMPLET, search.getStatus()); @@ -589,10 +589,10 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { await().until(() -> runInTransaction(() -> { Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")); return search.getNumFound(); - }), equalTo(20)); + }), equalTo(21)); runInTransaction(() -> { Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")); - assertEquals(20, 
search.getNumFound()); + assertEquals(21, search.getNumFound()); assertEquals(search.getNumFound(), mySearchResultDao.count()); assertNull(search.getTotalCount()); assertEquals(1, search.getVersion().intValue()); @@ -649,14 +649,14 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { */ waitForSize( - 20, + 21, 10000, () -> runInTransaction(() -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")).getNumFound()), () -> "Wanted 20: " + runInTransaction(() -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")).toString())); runInTransaction(() -> { Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")); - assertEquals(20, search.getNumFound()); + assertEquals(21, search.getNumFound()); assertEquals(search.getNumFound(), mySearchResultDao.count()); assertNull(search.getTotalCount()); assertEquals(1, search.getVersion().intValue()); @@ -1172,12 +1172,12 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { myCaptureQueriesListener.logSelectQueriesForCurrentThread(); assertEquals(4, myCaptureQueriesListener.countSelectQueries()); // first prefetch is 50+1 - assertEquals(51, myCaptureQueriesListener.logInsertQueries()); + assertEquals(52, myCaptureQueriesListener.logInsertQueries()); assertEquals(1, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); - assertEquals(51, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); + assertEquals(52, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); diff --git 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingPaginationR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingPaginationR4Test.java new file mode 100644 index 00000000000..aa9a751a9fc --- /dev/null +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingPaginationR4Test.java @@ -0,0 +1,191 @@ +package ca.uhn.fhir.jpa.provider.r4; + +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test; +import ca.uhn.fhir.parser.StrictErrorHandler; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.EncodingEnum; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.server.BasePagingProvider; +import ca.uhn.fhir.rest.server.IPagingProvider; +import ca.uhn.fhir.rest.server.RestfulServer; +import ca.uhn.fhir.util.BundleUtil; +import com.google.common.base.Charsets; +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.hl7.fhir.r4.model.Bundle; +import org.hl7.fhir.r4.model.Patient; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.springframework.beans.factory.annotation.Autowired; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import static org.hl7.fhir.instance.model.api.IBaseBundle.LINK_NEXT; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +@SuppressWarnings("Duplicates") +public class PatientEverythingPaginationR4Test extends 
BaseResourceProviderR4Test { + + private int myOriginalServerDefaultPageSize; + + @Autowired + JpaStorageSettings myStorageSettings; + + @BeforeEach + public void beforeDisableResultReuse() { + myStorageSettings.setReuseCachedSearchResultsForMillis(null); + } + + @Override + @BeforeEach + public void before() throws Exception { + super.before(); + myFhirContext.setParserErrorHandler(new StrictErrorHandler()); + + myStorageSettings.setAllowMultipleDelete(true); + + myOriginalServerDefaultPageSize = myServer.getDefaultPageSize(); + myServer.setDefaultPageSize(50); + + } + + @Override + @AfterEach + public void after() throws Exception { + super.after(); + + myStorageSettings.setReuseCachedSearchResultsForMillis(new JpaStorageSettings().getReuseCachedSearchResultsForMillis()); + myServer.setDefaultPageSize(myOriginalServerDefaultPageSize); + } + + /** + * Built to reproduce issue 5198, where not all $everything results were returned when _count equaled the maximum page size. + * Note that the issue is not gateway-related; it is a plain server issue. + */ + @Test + public void testEverythingPaginatesThroughAllPatients_whenCountIsEqualToMaxPageSize() throws IOException { + // setup + int totalPatients = 54; + createPatients(totalPatients); + + String url = myServerBase + "/Patient/$everything?_format=json&_count=" + BasePagingProvider.DEFAULT_MAX_PAGE_SIZE; + + // test + Bundle bundle = fetchBundle(url); + + // first page + List patientsFirstPage = BundleUtil.toListOfResourcesOfType(myFhirContext, bundle, Patient.class); + assertEquals(50, patientsFirstPage.size()); + + String nextUrl = BundleUtil.getLinkUrlOfType(myFhirContext, bundle, LINK_NEXT); + + // 2nd/last page + assertNotNull(nextUrl); + Bundle page2 = fetchBundle(nextUrl); + assertNotNull(page2); + List patientsPage2 = BundleUtil.toListOfResourcesOfType(myFhirContext, page2, Patient.class); + + assertEquals(4, patientsPage2.size()); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void 
testEverythingTypeOperationPagination_withDifferentPrefetchThresholds_coverageTest(boolean theProvideCountBool) throws IOException { + // setup + List previousPrefetchThreshold = myStorageSettings.getSearchPreFetchThresholds(); + // other tests may be resetting this + // so we'll set it + int pageSize = myPagingProvider.getDefaultPageSize(); + int serverPageSize = myServer.getDefaultPageSize(); + try { + int defaultPageSize = theProvideCountBool ? 50 : 10; + // set our prefetch thresholds to ensure we run out of them + List prefetchThreshold = Arrays.asList(10, 50, -1); + myStorageSettings.setSearchPreFetchThresholds(prefetchThreshold); + + // the number of patients to create + int total = 154; + String nextUrl; + createPatients(total); + Set ids = new HashSet<>(); + + String url = myServerBase + "/Patient/$everything?_format=json"; + if (theProvideCountBool) { + url += "&_count=" + BasePagingProvider.DEFAULT_MAX_PAGE_SIZE; + } + myPagingProvider.setDefaultPageSize(defaultPageSize); + myServer.setDefaultPageSize(defaultPageSize); + + // test + Bundle bundle = fetchBundle(url); + + // first page + List patientsPage = BundleUtil.toListOfResourcesOfType(myFhirContext, bundle, Patient.class); + assertEquals(defaultPageSize, patientsPage.size()); + + for (Patient p : patientsPage) { + assertTrue(ids.add(p.getId())); + } + nextUrl = BundleUtil.getLinkUrlOfType(myFhirContext, bundle, LINK_NEXT); + assertNotNull(nextUrl); + + // all future pages + do { + bundle = fetchBundle(nextUrl); + assertNotNull(bundle); + patientsPage = BundleUtil.toListOfResourcesOfType(myFhirContext, bundle, Patient.class); + for (Patient p : patientsPage) { + assertTrue(ids.add(p.getId())); + } + nextUrl = BundleUtil.getLinkUrlOfType(myFhirContext, bundle, LINK_NEXT); + if (nextUrl != null) { + assertEquals(defaultPageSize, patientsPage.size()); + } else { + assertEquals(4, patientsPage.size()); + } + } while (nextUrl != null); + + // ensure we found everything + assertEquals(total, ids.size()); + 
} finally { + // set it back, just in case + myStorageSettings.setSearchPreFetchThresholds(previousPrefetchThreshold); + myPagingProvider.setDefaultPageSize(pageSize); + myServer.setDefaultPageSize(serverPageSize); + } + } + + private void createPatients(int theCount) { + for (int i = 0; i < theCount; i++) { + Patient patient = new Patient(); + patient.addName().setFamily("lastn").addGiven("name"); + myPatientDao.create(patient, new SystemRequestDetails()).getId().toUnqualifiedVersionless(); + } + } + + private Bundle fetchBundle(String theUrl) throws IOException { + Bundle bundle; + HttpGet get = new HttpGet(theUrl); + CloseableHttpResponse resp = ourHttpClient.execute(get); + try { + assertEquals(EncodingEnum.JSON.getResourceContentTypeNonLegacy(), resp.getFirstHeader(Constants.HEADER_CONTENT_TYPE).getValue().replaceAll(";.*", "")); + bundle = EncodingEnum.JSON.newParser(myFhirContext).parseResource(Bundle.class, IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8)); + } finally { + IOUtils.closeQuietly(resp); + } + + return bundle; + } + +}