Fix reference params via filter

James Agnew 2019-08-16 14:56:36 -04:00
parent afb682dfe9
commit 9e8af58e81
4 changed files with 208 additions and 78 deletions

View File

@ -1743,11 +1743,11 @@ public class SearchBuilder implements ISearchBuilder {
}
private Collection<Predicate> createPredicateToken(Collection<IQueryParameterType> theParameters,
                                                    String theResourceName,
                                                    String theParamName,
                                                    CriteriaBuilder theBuilder,
                                                    From<?, ResourceIndexedSearchParamToken> theFrom,
                                                    SearchFilterParser.CompareOperation operation) {
final List<VersionIndependentConcept> codes = new ArrayList<>();
TokenParamModifier modifier = null;
@ -2495,7 +2495,7 @@ public class SearchBuilder implements ISearchBuilder {
// the user has a chance to know that they were in the results
if (allAdded.size() > 0) {
List<Long> includedPidList = new ArrayList<>(allAdded);
JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(includedPidList, () -> this);
HookParams params = new HookParams()
.add(IPreResourceAccessDetails.class, accessDetails)
.add(RequestDetails.class, theRequest)
@ -2718,13 +2718,13 @@ public class SearchBuilder implements ISearchBuilder {
Collections.singletonList(new NumberParam(theFilter.getValue())),
theFilter.getOperation());
} else if (typeEnum == RestSearchParameterTypeEnum.REFERENCE) {
-   return addPredicateReference(theResourceName,
-      theFilter.getParamPath().getName(),
-      Collections.singletonList(new ReferenceParam(theFilter.getParamPath().getName(),
-         (theFilter.getParamPath().getNext() != null) ? theFilter.getParamPath().getNext().toString() : null,
-         theFilter.getValue())),
-      theFilter.getOperation(), theRequest);
+   String paramName = theFilter.getParamPath().getName();
+   SearchFilterParser.CompareOperation operation = theFilter.getOperation();
+   String resourceType = null; // The value can either have (Patient/123) or not have (123) a resource type, either way it's not needed here
+   String chain = (theFilter.getParamPath().getNext() != null) ? theFilter.getParamPath().getNext().toString() : null;
+   String value = theFilter.getValue();
+   ReferenceParam referenceParam = new ReferenceParam(resourceType, chain, value);
+   return addPredicateReference(theResourceName, paramName, Collections.singletonList(referenceParam), operation, theRequest);
} else if (typeEnum == RestSearchParameterTypeEnum.QUANTITY) {
return addPredicateQuantity(theResourceName,
theFilter.getParamPath().getName(),
@ -2765,7 +2765,7 @@ public class SearchBuilder implements ISearchBuilder {
theResourceName, theRequest);
if (((SearchFilterParser.FilterLogical) filter).getOperation() == SearchFilterParser.FilterLogicalOperation.and) {
return myBuilder.and(leftPredicate, rightPredicate);
} else if (((SearchFilterParser.FilterLogical) filter).getOperation() == SearchFilterParser.FilterLogicalOperation.or) {
return myBuilder.or(leftPredicate, rightPredicate);
}
@ -3078,79 +3078,79 @@ public class SearchBuilder implements ISearchBuilder {
CurrentThreadCaptureQueriesListener.startCapturing();
}
// If we don't have a query yet, create one
if (myResultsIterator == null) {
   if (myMaxResultsToFetch == null) {
      myMaxResultsToFetch = myDaoConfig.getFetchSizeDefaultMaximum();
   }

   final TypedQuery<Long> query = createQuery(mySort, myMaxResultsToFetch, false, myRequest);

   mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());

   Query<Long> hibernateQuery = (Query<Long>) query;
   hibernateQuery.setFetchSize(myFetchSize);
   ScrollableResults scroll = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
   myResultsIterator = new ScrollableResultsIterator<>(scroll);

   // If the query resulted in extra results being requested
   if (myAlsoIncludePids != null) {
      myPreResultsIterator = myAlsoIncludePids.iterator();
   }
}

if (myNext == null) {

   if (myPreResultsIterator != null && myPreResultsIterator.hasNext()) {
      while (myPreResultsIterator.hasNext()) {
         Long next = myPreResultsIterator.next();
         if (next != null)
            if (myPidSet.add(next)) {
               myNext = next;
               break;
            }
      }
   }

   if (myNext == null) {
      while (myResultsIterator.hasNext()) {
         Long next = myResultsIterator.next();
         if (next != null) {
            if (myPidSet.add(next)) {
               myNext = next;
               break;
            } else {
               mySkipCount++;
            }
         }
      }
   }

   if (myNext == null) {
      if (myStillNeedToFetchIncludes) {
         myIncludesIterator = new IncludesIterator(myPidSet, myRequest);
         myStillNeedToFetchIncludes = false;
      }
      if (myIncludesIterator != null) {
         while (myIncludesIterator.hasNext()) {
            Long next = myIncludesIterator.next();
            if (next != null)
               if (myPidSet.add(next)) {
                  myNext = next;
                  break;
               }
         }
         if (myNext == null) {
            myNext = NO_MORE;
         }
      } else {
         myNext = NO_MORE;
      }
   }

} // if we need to fetch the next result

mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size());
} finally {
if (haveRawSqlHooks) {
@ -3281,7 +3281,6 @@ public class SearchBuilder implements ISearchBuilder {
}
/**
* Figures out the tolerance for a search. For example, if the user is searching for <code>4.00</code>, this method
returns <code>0.005</code> because we should actually match values which are
@ -3347,7 +3346,6 @@ public class SearchBuilder implements ISearchBuilder {
}
private static Predicate[] toArray(List<Predicate> thePredicates) {
return thePredicates.toArray(new Predicate[0]);
}
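
For context on the REFERENCE branch in the hunk at line 2718 above: the fix passes a null resource type to ReferenceParam and lets the raw filter value carry (or omit) the "Patient/" prefix itself. Below is a minimal standalone sketch of that decomposition; the literal inputs are hypothetical stand-ins for what the parsed _filter expression (for example "subject eq Patient/123") would supply.

import ca.uhn.fhir.rest.param.ReferenceParam;

public class ReferenceFilterSketch {
   public static void main(String[] args) {
      // Hypothetical stand-ins for values normally read from the parsed filter:
      //   theFilter.getParamPath().getName() -> "subject"
      //   theFilter.getParamPath().getNext() -> null (no chained path)
      //   theFilter.getValue()               -> "Patient/123" (or just "123")
      String paramName = "subject";
      String chain = null;
      String value = "Patient/123";

      // Resource type is deliberately null: the value may or may not carry one,
      // and the predicate builder does not need it at this point.
      ReferenceParam referenceParam = new ReferenceParam(null, chain, value);

      // paramName travels separately to addPredicateReference() in the real code;
      // the param itself just holds the reference value.
      System.out.println(paramName + " -> " + referenceParam.getValue());
   }
}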

View File

@ -7,17 +7,17 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CarePlan;
import org.hl7.fhir.r4.model.Patient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentMatchers;
import java.util.List;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.emptyString;
import static org.junit.Assert.*;
@SuppressWarnings({"Duplicates"})
@ -49,7 +49,7 @@ public class FhirResourceDaoR4FilterTest extends BaseJpaR4Test {
@Test
public void testBrackets() {
Patient p = new Patient();
p.addName().setFamily("Smith").addGiven("John");
p.setActive(true);
String id1 = myPatientDao.create(p).getId().toUnqualifiedVersionless().getValue();
@ -72,14 +72,14 @@ public class FhirResourceDaoR4FilterTest extends BaseJpaR4Test {
map.setLoadSynchronous(true);
map.add(Constants.PARAM_FILTER, new StringParam("(name eq smith) or (name eq jones)"));
found = toUnqualifiedVersionlessIdValues(myPatientDao.search(map));
assertThat(found, containsInAnyOrder(id1, id2));
}
@Test
public void testStringComparatorEq() {
Patient p = new Patient();
p.addName().setFamily("Smith").addGiven("John");
p.setActive(true);
String id1 = myPatientDao.create(p).getId().toUnqualifiedVersionless().getValue();
@ -106,6 +106,55 @@ public class FhirResourceDaoR4FilterTest extends BaseJpaR4Test {
}
@Test
public void testReferenceComparatorEq() {
Patient p = new Patient();
p.addName().setFamily("Smith").addGiven("John");
p.setActive(true);
IIdType ptId = myPatientDao.create(p).getId().toUnqualifiedVersionless();
p = new Patient();
p.addName().setFamily("Smith").addGiven("John2");
p.setActive(true);
IIdType ptId2 = myPatientDao.create(p).getId().toUnqualifiedVersionless();
p = new Patient();
p.addName().setFamily("Smith").addGiven("John3");
p.setActive(true);
IIdType ptId3 = myPatientDao.create(p).getId().toUnqualifiedVersionless();
CarePlan cp = new CarePlan();
cp.getSubject().setReference(ptId.getValue());
String cpId = myCarePlanDao.create(cp).getId().toUnqualifiedVersionless().getValue();
cp = new CarePlan();
cp.addActivity().getDetail().addPerformer().setReference(ptId2.getValue());
String cpId2 = myCarePlanDao.create(cp).getId().toUnqualifiedVersionless().getValue();
SearchParameterMap map;
List<String> found;
map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(Constants.PARAM_FILTER, new StringParam("subject eq " + ptId.getValue()));
found = toUnqualifiedVersionlessIdValues(myCarePlanDao.search(map));
assertThat(found, containsInAnyOrder(cpId));
map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(Constants.PARAM_FILTER, new StringParam("subject eq " + ptId.getIdPart()));
found = toUnqualifiedVersionlessIdValues(myCarePlanDao.search(map));
assertThat(found, containsInAnyOrder(cpId));
map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(Constants.PARAM_FILTER, new StringParam("(subject eq " + ptId.getIdPart() + ") or (performer eq " + ptId2.getValue() + ")"));
found = toUnqualifiedVersionlessIdValues(myCarePlanDao.search(map));
assertThat(found, containsInAnyOrder(cpId, cpId2));
}
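
The test above drives the DAO directly; over REST the same reference filter could be exercised through the generic client. A hedged sketch follows, with a hypothetical server base URL and patient id, reusing the search().byUrl() pattern that appears in the stress test later in this commit.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.util.UrlUtil;
import org.hl7.fhir.r4.model.Bundle;

public class FilterSearchClientSketch {
   public static void main(String[] args) {
      // Hypothetical endpoint; any HAPI FHIR R4 server with _filter searching enabled would do.
      String serverBase = "http://localhost:8080/fhir";

      FhirContext ctx = FhirContext.forR4();
      IGenericClient client = ctx.newRestfulGenericClient(serverBase);

      // Same query the test issues via myCarePlanDao.search(map), expressed over REST:
      // CarePlans whose subject reference matches a given Patient.
      String filter = "subject eq Patient/123";
      Bundle results = client
         .search()
         .byUrl(serverBase + "/CarePlan?_filter=" + UrlUtil.escapeUrlParam(filter))
         .returnBundle(Bundle.class)
         .execute();

      System.out.println("Matches: " + results.getEntry().size());
   }
}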
@Test
public void testFilterDisabled() {
myDaoConfig.setFilterParameterEnabled(false);

View File

@ -3,7 +3,10 @@ package ca.uhn.fhir.jpa.stresstest;
import ca.uhn.fhir.jpa.config.TestR4Config;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
@ -33,9 +36,7 @@ import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.TestPropertySource;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.UUID;
+import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@ -64,7 +65,8 @@ public class StressTestR4Test extends BaseResourceProviderR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StressTestR4Test.class);
private RequestValidatingInterceptor myRequestValidatingInterceptor;
@Autowired
-private IPagingProvider myPagingProvider;
+private DatabaseBackedPagingProvider myPagingProvider;
private int myPreviousMaxPageSize;
@Override
@After
@ -74,6 +76,7 @@ public class StressTestR4Test extends BaseResourceProviderR4Test {
ourRestServer.unregisterInterceptor(myRequestValidatingInterceptor);
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);
myPagingProvider.setMaximumPageSize(myPreviousMaxPageSize);
}
@Override
@ -85,8 +88,85 @@ public class StressTestR4Test extends BaseResourceProviderR4Test {
FhirInstanceValidator module = new FhirInstanceValidator();
module.setValidationSupport(myValidationSupport);
myRequestValidatingInterceptor.addValidatorModule(module);
myPreviousMaxPageSize = myPagingProvider.getMaximumPageSize();
myPagingProvider.setMaximumPageSize(300);
}
@Test
public void testNoDuplicatesInSearchResults() throws Exception {
int count = 1000;
Bundle bundle = new Bundle();
for (int i = 0; i < count; i++) {
Observation o = new Observation();
o.setId("A" + leftPad(Integer.toString(i), 4, '0'));
o.setEffective( DateTimeType.now());
o.setStatus(Observation.ObservationStatus.FINAL);
bundle.addEntry().setFullUrl(o.getId()).setResource(o).getRequest().setMethod(HTTPVerb.PUT).setUrl("Observation/A" + i);
}
StopWatch sw = new StopWatch();
ourLog.info("Saving {} resources", bundle.getEntry().size());
mySystemDao.transaction(null, bundle);
ourLog.info("Saved {} resources in {}", bundle.getEntry().size(), sw.toString());
Map<String, IBaseResource> ids = new HashMap<>();
IGenericClient fhirClient = this.ourClient;
String url = ourServerBase + "/Observation?date=gt2000&_sort=-_lastUpdated";
int pageIndex = 0;
ourLog.info("Loading page {}", pageIndex);
Bundle searchResult = fhirClient
.search()
.byUrl(url)
.count(300)
.returnBundle(Bundle.class)
.execute();
while(true) {
List<String> passIds = searchResult
.getEntry()
.stream()
.map(t -> t.getResource().getIdElement().getValue())
.collect(Collectors.toList());
int index = 0;
for (String nextId : passIds) {
Resource nextResource = searchResult.getEntry().get(index).getResource();
if (ids.containsKey(nextId)) {
String previousContent = fhirClient.getFhirContext().newJsonParser().encodeResourceToString(ids.get(nextId));
String newContent = fhirClient.getFhirContext().newJsonParser().encodeResourceToString(nextResource);
throw new Exception("Duplicate ID " + nextId + " found at index " + index + " of page " + pageIndex + "\n\nPrevious: " + previousContent + "\n\nNew: " + newContent);
}
ids.put(nextId, nextResource);
index++;
}
if (searchResult.getLink(Constants.LINK_NEXT) == null) {
break;
} else {
if (searchResult.getEntry().size() != 300) {
throw new Exception("Page had " + searchResult.getEntry().size() + " resources");
}
if (passIds.size() != 300) {
throw new Exception("Page had " + passIds.size() + " unique ids");
}
}
pageIndex++;
ourLog.info("Loading page {}: {}", pageIndex, searchResult.getLink(Constants.LINK_NEXT).getUrl());
searchResult = fhirClient.loadPage().next(searchResult).execute();
}
assertEquals(count, ids.size());
}
@Test
public void testPageThroughLotsOfPages() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);

View File

@ -29,6 +29,9 @@
the server will now reject requests for other resource types earlier in the processing
cycle. Thanks to Anders Havn for the suggestion!
</action>
<action type="fix">
Reference search parameters did not work via the _filter parameter
</action>
</release>
<release version="4.0.0" date="2019-08-14" description="Igloo">
<action type="add">