Merge branch 'master' into rel_7_4_mb_20240813

commit 359984e06c

@@ -20,10 +20,12 @@ package ca.uhn.fhir.util;
* #L%
*/

import com.google.common.collect.Streams;
import jakarta.annotation.Nonnull;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.Stream;

@@ -57,4 +59,9 @@ public class TaskChunker<T> {
public <T> Stream<List<T>> chunk(Stream<T> theStream, int theChunkSize) {
return StreamUtil.partition(theStream, theChunkSize);
}

@Nonnull
public void chunk(Iterator<T> theIterator, int theChunkSize, Consumer<List<T>> theListConsumer) {
chunk(Streams.stream(theIterator), theChunkSize).forEach(theListConsumer);
}
}
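
As a usage sketch (not part of the commit) of the new iterator-based chunk() overload above, mirroring the TaskChunkerTest case further down in this diff. The chunk size of 3 and the integer data are arbitrary:

import ca.uhn.fhir.util.TaskChunker;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

class TaskChunkerUsageSketch {
	public static void main(String[] args) {
		Iterator<Integer> source = List.of(1, 2, 3, 4, 5).iterator();
		List<List<Integer>> chunks = new ArrayList<>();

		// Consume the iterator in chunks of at most 3 elements; each chunk is handed to the consumer.
		new TaskChunker<Integer>().chunk(source, 3, chunks::add);

		System.out.println(chunks); // [[1, 2, 3], [4, 5]]
	}
}
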
@@ -3,14 +3,21 @@ package ca.uhn.fhir.util;
import jakarta.annotation.Nonnull;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.IntStream;
import java.util.stream.Stream;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.times;

@@ -43,8 +50,32 @@ public class TaskChunkerTest {

@Nonnull
private static List<Integer> newIntRangeList(int startInclusive, int endExclusive) {
List<Integer> input = IntStream.range(startInclusive, endExclusive).boxed().toList();
return input;
return IntStream.range(startInclusive, endExclusive).boxed().toList();
}

@ParameterizedTest
@MethodSource("testIteratorChunkArguments")
void testIteratorChunk(List<Integer> theListToChunk, List<List<Integer>> theExpectedChunks) {
// given
Iterator<Integer> iter = theListToChunk.iterator();
ArrayList<List<Integer>> result = new ArrayList<>();

// when
new TaskChunker<Integer>().chunk(iter, 3, result::add);

// then
assertEquals(theExpectedChunks, result);
}

public static Stream<Arguments> testIteratorChunkArguments() {
return Stream.of(
Arguments.of(Collections.emptyList(), Collections.emptyList()),
Arguments.of(List.of(1), List.of(List.of(1))),
Arguments.of(List.of(1,2), List.of(List.of(1,2))),
Arguments.of(List.of(1,2,3), List.of(List.of(1,2,3))),
Arguments.of(List.of(1,2,3,4), List.of(List.of(1,2,3), List.of(4))),
Arguments.of(List.of(1,2,3,4,5,6,7,8,9), List.of(List.of(1,2,3), List.of(4,5,6), List.of(7,8,9)))
);
}

}

@@ -0,0 +1,8 @@
---
type: fix
issue: 6216
jira: SMILE-8806
title: "Previously, searches combining the `_text` query parameter (using Lucene/Elasticsearch) with query parameters
using the database (e.g. `identifier` or `date`) could miss matches when more than 500 results match the `_text` query
parameter. This has been fixed, but may be slow if many results match the `_text` query and must be checked against the
database parameters."
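
To illustrate the kind of request this note covers, a hedged sketch in the style of the FhirSearchDaoR4Test cases added later in this commit: a search combining the Lucene/Elasticsearch-backed `_text` parameter with a database-backed token parameter. The fixture fields myPatientDao and mySrd, and the concrete values, are assumed from that test class.

// Illustrative only; runs inside a BaseJpaR4Test-style fixture as in FhirSearchDaoR4Test.
private void searchCombiningTextAndDatabaseParameters() {
	SearchParameterMap map = new SearchParameterMap().setLoadSynchronous(true);

	// Lucene/Elasticsearch-backed full-text parameter ...
	map.add(Constants.PARAM_TEXT, new StringParam("AAAS"));

	// ... combined with a database-backed token parameter. Before this fix, matches beyond
	// the first 500 full-text hits could be dropped before the database check ran.
	TokenAndListParam active = new TokenAndListParam();
	active.addAnd(new TokenOrListParam().addOr(new TokenParam().setValue("true")));
	map.add("active", active);

	IBundleProvider outcome = myPatientDao.search(map, mySrd);
}
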
@@ -32,6 +32,7 @@ import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchResourceProjection;
import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchSearchBuilder;
import ca.uhn.fhir.jpa.dao.search.IHSearchSortHelper;
import ca.uhn.fhir.jpa.dao.search.LastNOperation;
import ca.uhn.fhir.jpa.dao.search.SearchScrollQueryExecutorAdaptor;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams;

@@ -40,6 +41,7 @@ import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteOptions;
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteSearch;
import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.search.builder.SearchQueryExecutors;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;

@@ -183,6 +185,19 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
return doSearch(theResourceName, theParams, null, theMaxResultsToFetch, theRequestDetails);
}

@Transactional
@Override
public ISearchQueryExecutor searchScrolled(
String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails) {
validateHibernateSearchIsEnabled();

SearchQueryOptionsStep<?, Long, SearchLoadingOptionsStep, ?, ?> searchQueryOptionsStep =
getSearchQueryOptionsStep(theResourceType, theParams, null);
logQuery(searchQueryOptionsStep, theRequestDetails);

return new SearchScrollQueryExecutorAdaptor(searchQueryOptionsStep.scroll(SearchBuilder.getMaximumPageSize()));
}

// keep this in sync with supportsSomeOf();
@SuppressWarnings("rawtypes")
private ISearchQueryExecutor doSearch(

@@ -62,6 +62,17 @@ public interface IFulltextSearchSvc {
Integer theMaxResultsToFetch,
RequestDetails theRequestDetails);

/**
* Query the index for a complete iterator of ALL results. (scrollable search result).
*
* @param theResourceName e.g. Patient
* @param theParams The search query
* @param theRequestDetails The request details
* @return Iterator of result PIDs
*/
ISearchQueryExecutor searchScrolled(
String theResourceName, SearchParameterMap theParams, RequestDetails theRequestDetails);

/**
* Autocomplete search for NIH $expand contextDirection=existing
* @param theOptions operation options

@@ -101,7 +101,6 @@ import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Streams;
import com.healthmarketscience.sqlbuilder.Condition;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;

@@ -141,7 +140,9 @@ import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
import static ca.uhn.fhir.jpa.util.InClauseNormalizer.*;
import static java.util.Objects.requireNonNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

@@ -205,9 +206,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
@Autowired(required = false)
private IElasticsearchSvc myIElasticsearchSvc;

@Autowired
private FhirContext myCtx;

@Autowired
private IJpaStorageResourceParser myJpaStorageResourceParser;

@@ -332,8 +330,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
init(theParams, theSearchUuid, theRequestPartitionId);

if (checkUseHibernateSearch()) {
long count = myFulltextSearchSvc.count(myResourceName, theParams.clone());
return count;
return myFulltextSearchSvc.count(myResourceName, theParams.clone());
}

List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), null, null, null, true, theRequest, null);

@@ -404,8 +401,16 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
resultCount = fulltextMatchIds.size();
} else {
fulltextExecutor = myFulltextSearchSvc.searchNotScrolled(
myResourceName, myParams, myMaxResultsToFetch, theRequest);
// todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't
// enabled SP indexing).
// and some queries don't need JPA. We only need the scroll when we need to intersect with JPA.
// It would be faster to have a non-scrolled search in this case, since creating the scroll requires
// extra work in Elastic.
// if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ...

// we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just
// a page.
fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest);
}

if (fulltextExecutor == null) {

@@ -457,7 +462,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
// We break the pids into chunks that fit in the 1k limit for jdbc bind params.
new QueryChunker<Long>()
.chunk(
Streams.stream(fulltextExecutor).collect(Collectors.toList()),
fulltextExecutor,
SearchBuilder.getMaximumPageSize(),
t -> doCreateChunkedQueries(
theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries));
}

@@ -560,8 +566,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
boolean theCount,
RequestDetails theRequest,
ArrayList<ISearchQueryExecutor> theQueries) {

if (thePids.size() < getMaximumPageSize()) {
normalizeIdListForLastNInClause(thePids);
thePids = normalizeIdListForInClause(thePids);
}
createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids, theQueries);
}

@@ -885,41 +892,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
&& theParams.values().stream()
.flatMap(Collection::stream)
.flatMap(Collection::stream)
.anyMatch(t -> t instanceof ReferenceParam);
}

private List<Long> normalizeIdListForLastNInClause(List<Long> lastnResourceIds) {
/*
The following is a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying
numbers of parameters, this can overwhelm Hibernate's QueryPlanCache and deplete heap space. See the following link for more info:
https://stackoverflow.com/questions/31557076/spring-hibernate-query-plan-cache-memory-usage.

Normalizing the number of parameters in the "in" clause stabilizes the size of the QueryPlanCache, so long as the number of
arguments never exceeds the maximum specified below.
*/
int listSize = lastnResourceIds.size();

if (listSize > 1 && listSize < 10) {
padIdListWithPlaceholders(lastnResourceIds, 10);
} else if (listSize > 10 && listSize < 50) {
padIdListWithPlaceholders(lastnResourceIds, 50);
} else if (listSize > 50 && listSize < 100) {
padIdListWithPlaceholders(lastnResourceIds, 100);
} else if (listSize > 100 && listSize < 200) {
padIdListWithPlaceholders(lastnResourceIds, 200);
} else if (listSize > 200 && listSize < 500) {
padIdListWithPlaceholders(lastnResourceIds, 500);
} else if (listSize > 500 && listSize < 800) {
padIdListWithPlaceholders(lastnResourceIds, 800);
}

return lastnResourceIds;
}

private void padIdListWithPlaceholders(List<Long> theIdList, int preferredListSize) {
while (theIdList.size() < preferredListSize) {
theIdList.add(-1L);
}
.anyMatch(ReferenceParam.class::isInstance);
}

private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) {

@@ -1154,7 +1127,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

List<Long> versionlessPids = JpaPid.toLongList(thePids);
if (versionlessPids.size() < getMaximumPageSize()) {
versionlessPids = normalizeIdListForLastNInClause(versionlessPids);
versionlessPids = normalizeIdListForInClause(versionlessPids);
}

// -- get the resource from the searchView

@@ -1243,7 +1216,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
Map<Long, Collection<ResourceTag>> tagMap = new HashMap<>();

// -- no tags
if (thePidList.size() == 0) return tagMap;
if (thePidList.isEmpty()) return tagMap;

// -- get all tags for the idList
Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(thePidList);

@@ -1383,7 +1356,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
EntityManager entityManager = theParameters.getEntityManager();
Integer maxCount = theParameters.getMaxCount();
FhirContext fhirContext = theParameters.getFhirContext();
DateRangeParam lastUpdated = theParameters.getLastUpdated();
RequestDetails request = theParameters.getRequestDetails();
String searchIdOrDescription = theParameters.getSearchIdOrDescription();
List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes();

@@ -1922,11 +1894,10 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
assert !targetResourceTypes.isEmpty();

Set<Long> identityHashesForTypes = targetResourceTypes.stream()
return targetResourceTypes.stream()
.map(type -> BaseResourceIndexedSearchParam.calculateHashIdentity(
myPartitionSettings, myRequestPartitionId, type, "url"))
.collect(Collectors.toSet());
return identityHashesForTypes;
}

private <T> List<Collection<T>> partition(Collection<T> theNextRoundMatches, int theMaxLoad) {

@@ -2506,7 +2477,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

private void retrieveNextIteratorQuery() {
close();
if (myQueryList != null && myQueryList.size() > 0) {
if (isNotEmpty(myQueryList)) {
myResultsIterator = myQueryList.remove(0);
myHasNextIteratorQuery = true;
} else {

@@ -0,0 +1,65 @@
package ca.uhn.fhir.jpa.util;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/*
This class encapsulate the implementation providing a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying
numbers of parameters, this can overwhelm Hibernate's QueryPlanCache and deplete heap space. See the following link for more info:
https://stackoverflow.com/questions/31557076/spring-hibernate-query-plan-cache-memory-usage.

Normalizing the number of parameters in the "in" clause stabilizes the size of the QueryPlanCache, so long as the number of
arguments never exceeds the maximum specified below.
*/
public class InClauseNormalizer {

public static List<Long> normalizeIdListForInClause(List<Long> theResourceIds) {

List<Long> retVal = theResourceIds;

int listSize = theResourceIds.size();

if (listSize > 1 && listSize < 10) {
retVal = padIdListWithPlaceholders(theResourceIds, 10);
} else if (listSize > 10 && listSize < 50) {
retVal = padIdListWithPlaceholders(theResourceIds, 50);
} else if (listSize > 50 && listSize < 100) {
retVal = padIdListWithPlaceholders(theResourceIds, 100);
} else if (listSize > 100 && listSize < 200) {
retVal = padIdListWithPlaceholders(theResourceIds, 200);
} else if (listSize > 200 && listSize < 500) {
retVal = padIdListWithPlaceholders(theResourceIds, 500);
} else if (listSize > 500 && listSize < 800) {
retVal = padIdListWithPlaceholders(theResourceIds, 800);
}

return retVal;
}

private static List<Long> padIdListWithPlaceholders(List<Long> theIdList, int preferredListSize) {
List<Long> retVal = theIdList;

if (isUnmodifiableList(theIdList)) {
retVal = new ArrayList<>(preferredListSize);
retVal.addAll(theIdList);
}

while (retVal.size() < preferredListSize) {
retVal.add(-1L);
}

return retVal;
}

private static boolean isUnmodifiableList(List<Long> theList) {
try {
theList.addAll(Collections.emptyList());
} catch (Exception e) {
return true;
}
return false;
}

private InClauseNormalizer() {}
}
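
A small sketch (not part of the commit) of the padding behaviour implemented by this class; the concrete IDs are arbitrary, and the expectations match the InClauseNormalizerTest cases that follow.

import ca.uhn.fhir.jpa.util.InClauseNormalizer;

import java.util.List;

class InClauseNormalizerSketch {
	public static void main(String[] args) {
		List<Long> ids = List.of(10L, 11L, 12L, 13L, 14L, 15L, 16L); // 7 ids, unmodifiable

		// 7 falls in the (1, 10) tier, so a new modifiable list is returned, padded to 10
		// entries with the -1L placeholder. Sizes of 0, 1, an exact tier boundary
		// (10, 50, 100, 200, 500, 800) or anything above 800 come back unchanged.
		List<Long> normalized = InClauseNormalizer.normalizeIdListForInClause(ids);

		System.out.println(normalized); // [10, 11, 12, 13, 14, 15, 16, -1, -1, -1]
	}
}
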
@@ -0,0 +1,72 @@
package ca.uhn.fhir.util;

import ca.uhn.fhir.jpa.util.InClauseNormalizer;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;

import static java.util.Collections.nCopies;
import static java.util.Collections.unmodifiableList;
import static org.assertj.core.api.Assertions.assertThat;

public class InClauseNormalizerTest {
private static final Long ourResourceId = 1L;
private static final Long ourPaddingValue = -1L;

@ParameterizedTest
@MethodSource("arguments")
public void testNormalizeUnmodifiableList_willCreateNewListAndPadToSize(int theInitialListSize, int theExpectedNormalizedListSize) {
List<Long> initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId));
initialList = unmodifiableList(initialList);

List<Long> normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList);

assertNormalizedList(initialList, normalizedList, theInitialListSize, theExpectedNormalizedListSize);
}

@ParameterizedTest
@MethodSource("arguments")
public void testNormalizeListToSizeAndPad(int theInitialListSize, int theExpectedNormalizedListSize) {
List<Long> initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId));

List<Long> normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList);

assertNormalizedList(initialList, normalizedList, theInitialListSize, theExpectedNormalizedListSize);
}

private void assertNormalizedList(List<Long> theInitialList, List<Long> theNormalizedList, int theInitialListSize, int theExpectedNormalizedListSize) {
List<Long> expectedPaddedSubList = new ArrayList<>(nCopies(theExpectedNormalizedListSize - theInitialListSize, ourPaddingValue));

assertThat(theNormalizedList).startsWith(listToArray(theInitialList));
assertThat(theNormalizedList).hasSize(theExpectedNormalizedListSize);
assertThat(theNormalizedList).endsWith(listToArray(expectedPaddedSubList));
}

static Long[] listToArray(List<Long> theList) {
return theList.toArray(new Long[0]);
}

private static Stream<Arguments> arguments(){
return Stream.of(
Arguments.of(0, 0),
Arguments.of(1, 1),
Arguments.of(2, 10),
Arguments.of(10, 10),
Arguments.of(12, 50),
Arguments.of(50, 50),
Arguments.of(51, 100),
Arguments.of(100, 100),
Arguments.of(150, 200),
Arguments.of(300, 500),
Arguments.of(500, 500),
Arguments.of(700, 800),
Arguments.of(800, 800),
Arguments.of(801, 801)
);
}

}

@@ -56,8 +56,6 @@ public class FhirResourceDaoR4SearchLastNIT extends BaseR4SearchLastN {
@Mock
private IHSearchEventListener mySearchEventListener;



@Test
public void testLastNChunking() {

@@ -108,7 +106,6 @@ public class FhirResourceDaoR4SearchLastNIT extends BaseR4SearchLastN {
secondQueryPattern.append("\\).*");
assertThat(queries.get(1).toUpperCase().replaceAll(" , ", ",")).matches(secondQueryPattern.toString());
assertThat(queries.get(3).toUpperCase().replaceAll(" , ", ",")).matches(secondQueryPattern.toString());

}

@Test

@@ -763,7 +763,7 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
myFirstStepLatch.awaitExpected();

// validate
myBatch2JobHelper.awaitJobHasStatusWithForcedMaintenanceRuns(instanceId, StatusEnum.IN_PROGRESS);
myBatch2JobHelper.awaitJobHasStatusWithForcedMaintenanceRuns(instanceId, StatusEnum.ERRORED);

// execute
ourLog.info("Cancel job {}", instanceId);

@@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.dao.r4;

import static org.junit.jupiter.api.Assertions.assertEquals;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.Constants;

@@ -12,6 +12,9 @@ import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenAndListParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;

@@ -19,9 +22,11 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import java.util.ArrayList;
import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;

public class FhirSearchDaoR4Test extends BaseJpaR4Test {

@@ -51,11 +56,11 @@ public class FhirSearchDaoR4Test extends BaseJpaR4Test {
patient.addName().addGiven(content).setFamily("hirasawa");
id1 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getIdPartAsLong();
}
Long id2;

{
Patient patient = new Patient();
patient.addName().addGiven("mio").setFamily("akiyama");
id2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getIdPartAsLong();
myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getIdPartAsLong();
}

SearchParameterMap params = new SearchParameterMap();

@@ -257,4 +262,105 @@ public class FhirSearchDaoR4Test extends BaseJpaR4Test {
}
}

@Test
public void testSearchNarrativeWithLuceneSearch() {
final int numberOfPatientsToCreate = SearchBuilder.getMaximumPageSize() + 10;
List<String> expectedActivePatientIds = new ArrayList<>(numberOfPatientsToCreate);

for (int i = 0; i < numberOfPatientsToCreate; i++)
{
Patient patient = new Patient();
patient.getText().setDivAsString("<div>AAAS<p>FOO</p> CCC </div>");
expectedActivePatientIds.add(myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getIdPart());
}

{
Patient patient = new Patient();
patient.getText().setDivAsString("<div>AAAB<p>FOO</p> CCC </div>");
myPatientDao.create(patient, mySrd);
}
{
Patient patient = new Patient();
patient.getText().setDivAsString("<div>ZZYZXY</div>");
myPatientDao.create(patient, mySrd);
}

SearchParameterMap map = new SearchParameterMap().setLoadSynchronous(true);
map.add(Constants.PARAM_TEXT, new StringParam("AAAS"));

IBundleProvider searchResultBundle = myPatientDao.search(map, mySrd);
List<String> resourceIdsFromSearchResult = searchResultBundle.getAllResourceIds();

assertThat(resourceIdsFromSearchResult).containsExactlyInAnyOrderElementsOf(expectedActivePatientIds);
}

@Test
public void testLuceneNarrativeSearchQueryIntersectingJpaQuery() {
final int numberOfPatientsToCreate = SearchBuilder.getMaximumPageSize() + 10;
List<String> expectedActivePatientIds = new ArrayList<>(numberOfPatientsToCreate);

// create active and non-active patients with the same narrative
for (int i = 0; i < numberOfPatientsToCreate; i++)
{
Patient activePatient = new Patient();
activePatient.getText().setDivAsString("<div>AAAS<p>FOO</p> CCC </div>");
activePatient.setActive(true);
String patientId = myPatientDao.create(activePatient, mySrd).getId().toUnqualifiedVersionless().getIdPart();
expectedActivePatientIds.add(patientId);

Patient nonActivePatient = new Patient();
nonActivePatient.getText().setDivAsString("<div>AAAS<p>FOO</p> CCC </div>");
nonActivePatient.setActive(false);
myPatientDao.create(nonActivePatient, mySrd);
}

SearchParameterMap map = new SearchParameterMap().setLoadSynchronous(true);

TokenAndListParam tokenAndListParam = new TokenAndListParam();
tokenAndListParam.addAnd(new TokenOrListParam().addOr(new TokenParam().setValue("true")));

map.add("active", tokenAndListParam);
map.add(Constants.PARAM_TEXT, new StringParam("AAAS"));

IBundleProvider searchResultBundle = myPatientDao.search(map, mySrd);
List<String> resourceIdsFromSearchResult = searchResultBundle.getAllResourceIds();

assertThat(resourceIdsFromSearchResult).containsExactlyInAnyOrderElementsOf(expectedActivePatientIds);
}

@Test
public void testLuceneContentSearchQueryIntersectingJpaQuery() {
final int numberOfPatientsToCreate = SearchBuilder.getMaximumPageSize() + 10;
final String patientFamilyName = "Flanders";
List<String> expectedActivePatientIds = new ArrayList<>(numberOfPatientsToCreate);

// create active and non-active patients with the same narrative
for (int i = 0; i < numberOfPatientsToCreate; i++)
{
Patient activePatient = new Patient();
activePatient.addName().setFamily(patientFamilyName);
activePatient.setActive(true);
String patientId = myPatientDao.create(activePatient, mySrd).getId().toUnqualifiedVersionless().getIdPart();
expectedActivePatientIds.add(patientId);

Patient nonActivePatient = new Patient();
nonActivePatient.addName().setFamily(patientFamilyName);
nonActivePatient.setActive(false);
myPatientDao.create(nonActivePatient, mySrd);
}

SearchParameterMap map = new SearchParameterMap().setLoadSynchronous(true);

TokenAndListParam tokenAndListParam = new TokenAndListParam();
tokenAndListParam.addAnd(new TokenOrListParam().addOr(new TokenParam().setValue("true")));

map.add("active", tokenAndListParam);
map.add(Constants.PARAM_CONTENT, new StringParam(patientFamilyName));

IBundleProvider searchResultBundle = myPatientDao.search(map, mySrd);
List<String> resourceIdsFromSearchResult = searchResultBundle.getAllResourceIds();

assertThat(resourceIdsFromSearchResult).containsExactlyInAnyOrderElementsOf(expectedActivePatientIds);
}

}

@@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR JPA Server Test Utilities
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.embedded;

import org.apache.commons.lang3.StringUtils;

@@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR JPA Server Test Utilities
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.embedded.annotation;

import ca.uhn.fhir.jpa.embedded.OracleCondition;

@@ -25,6 +25,7 @@ import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import com.google.common.annotations.VisibleForTesting;
import org.awaitility.Awaitility;
import org.awaitility.core.ConditionTimeoutException;
import org.slf4j.Logger;

@@ -50,6 +51,8 @@ public class Batch2JobHelper {
private static final Logger ourLog = LoggerFactory.getLogger(Batch2JobHelper.class);

private static final int BATCH_SIZE = 100;
public static final int DEFAULT_WAIT_DEADLINE = 30;
public static final Duration DEFAULT_WAIT_DURATION = Duration.of(DEFAULT_WAIT_DEADLINE, ChronoUnit.SECONDS);

private final IJobMaintenanceService myJobMaintenanceService;
private final IJobCoordinator myJobCoordinator;

@@ -82,11 +85,11 @@ public class Batch2JobHelper {
}

public JobInstance awaitJobHasStatus(String theInstanceId, StatusEnum... theExpectedStatus) {
return awaitJobHasStatus(theInstanceId, 30, theExpectedStatus);
return awaitJobHasStatus(theInstanceId, DEFAULT_WAIT_DEADLINE, theExpectedStatus);
}

public JobInstance awaitJobHasStatusWithoutMaintenancePass(String theInstanceId, StatusEnum... theExpectedStatus) {
return awaitJobawaitJobHasStatusWithoutMaintenancePass(theInstanceId, 10, theExpectedStatus);
return awaitJobawaitJobHasStatusWithoutMaintenancePass(theInstanceId, DEFAULT_WAIT_DEADLINE, theExpectedStatus);
}

public JobInstance awaitJobHasStatus(String theInstanceId, int theSecondsToWait, StatusEnum... theExpectedStatus) {

@@ -144,7 +147,7 @@ public class Batch2JobHelper {
return myJobCoordinator.getInstance(theBatchJobId);
}

private boolean checkStatusWithMaintenancePass(String theInstanceId, StatusEnum... theExpectedStatuses) throws InterruptedException {
private boolean checkStatusWithMaintenancePass(String theInstanceId, StatusEnum... theExpectedStatuses) {
if (hasStatus(theInstanceId, theExpectedStatuses)) {
return true;
}

@@ -170,37 +173,41 @@ public class Batch2JobHelper {
return awaitJobHasStatus(theInstanceId, StatusEnum.ERRORED, StatusEnum.FAILED);
}

public void awaitJobHasStatusWithForcedMaintenanceRuns(String theInstanceId, StatusEnum theStatusEnum) {
public void awaitJobHasStatusWithForcedMaintenanceRuns(String theInstanceId, StatusEnum... theStatusEnums) {
AtomicInteger counter = new AtomicInteger();
Duration waitDuration = DEFAULT_WAIT_DURATION;
try {
await()
.atMost(Duration.of(10, ChronoUnit.SECONDS))
.atMost(waitDuration)
.until(() -> {
counter.getAndIncrement();
forceRunMaintenancePass();
return hasStatus(theInstanceId, theStatusEnum);
return hasStatus(theInstanceId, theStatusEnums);
});
} catch (ConditionTimeoutException ex) {
StatusEnum status = getStatus(theInstanceId);
String msg = String.format(
"Job %s has state %s after 10s timeout and %d checks",
fail(String.format(
"Job %s has state %s after %s timeout and %d checks",
theInstanceId,
status.name(),
waitDuration,
counter.get()
);
), ex);
}
}

public void awaitJobInProgress(String theInstanceId) {
Duration waitDuration = DEFAULT_WAIT_DURATION;
try {
await()
.atMost(Duration.of(10, ChronoUnit.SECONDS))
.atMost(waitDuration)
.until(() -> checkStatusWithMaintenancePass(theInstanceId, StatusEnum.IN_PROGRESS));
} catch (ConditionTimeoutException ex) {
StatusEnum statusEnum = getStatus(theInstanceId);
String msg = String.format("Job %s still has status %s after 10 seconds.",
String msg = String.format("Job %s still has status %s after %s seconds.",
theInstanceId,
statusEnum.name());
statusEnum.name(),
waitDuration);
fail(msg);
}
}

@@ -291,12 +298,7 @@ public class Batch2JobHelper {
}

if (!map.isEmpty()) {
ourLog.error(
"Found Running Jobs "
+ map.keySet().stream()
.map(k -> k + " : " + map.get(k))
.collect(Collectors.joining("\n"))
);
ourLog.error("Found Running Jobs {}",map.keySet().stream().map(k -> k + " : " + map.get(k)).collect(Collectors.joining("\n")));

return true;
}

@@ -305,7 +307,7 @@ public class Batch2JobHelper {

public void awaitNoJobsRunning(boolean theExpectAtLeastOneJobToExist) {
HashMap<String, String> map = new HashMap<>();
Awaitility.await().atMost(10, TimeUnit.SECONDS)
Awaitility.await().atMost(DEFAULT_WAIT_DURATION)
.until(() -> {
myJobMaintenanceService.runMaintenancePass();

@@ -335,6 +337,7 @@ public class Batch2JobHelper {
myJobMaintenanceService.runMaintenancePass();
}

@VisibleForTesting
public void enableMaintenanceRunner(boolean theEnabled) {
myJobMaintenanceService.enableMaintenancePass(theEnabled);
}