Avoid search failure on Oracle when performing very large includes

James Agnew 2019-01-04 09:11:11 -05:00
parent 31b8a392b7
commit 7ba07d9f02
5 changed files with 177 additions and 49 deletions


@@ -0,0 +1,37 @@
package ca.uhn.fhir.jpa.config;
import net.ttddyy.dsproxy.ExecutionInfo;
import net.ttddyy.dsproxy.QueryInfo;
import net.ttddyy.dsproxy.proxy.ParameterSetOperation;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
* What's going on here:
* <p>
* Oracle chokes on queries that have more than 1000 bound parameters. We have stress tests
* that perform giant searches, so this listener is here to fail fast if any generated query
* ends up with enough bound parameters to break Oracle. Derby does not enforce this limit by
* default, which is why we use this listener to simulate the failure.
* </p>
*/
public class BlockLargeNumbersOfParamsListener implements ProxyDataSourceBuilder.SingleQueryExecution {
private static final Logger ourLog = LoggerFactory.getLogger(BlockLargeNumbersOfParamsListener.class);
@Override
public void execute(ExecutionInfo theExecInfo, List<QueryInfo> theQueryInfoList) {
ourLog.trace("Query with {} queries", theQueryInfoList.size());
for (QueryInfo next : theQueryInfoList) {
ourLog.trace("Have {} param lists", next.getParametersList().size());
for (List<ParameterSetOperation> nextParamsList : next.getParametersList()) {
ourLog.trace("Have {} sub-param lists", nextParamsList.size());
Validate.isTrue(nextParamsList.size() < 1000, "Query has %s parameters: %s", nextParamsList.size(), next.getQuery());
}
}
}
}
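
For context, here is a minimal sketch of how a listener like this gets attached to a proxied DataSource. The actual registration used by this commit is in the TestR4Config change further down; the JDBC URL and class name below are placeholders rather than values from the commit.

package ca.uhn.fhir.jpa.config;

import javax.sql.DataSource;

import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;

public class ProxyDataSourceWiringSketch {

	public static DataSource create() {
		// Placeholder connection pool and JDBC URL, purely for illustration
		BasicDataSource target = new BasicDataSource();
		target.setUrl("jdbc:derby:memory:exampleDb;create=true");

		// Wrap the real pool in a datasource-proxy that runs the listener before every
		// statement execution, so any statement carrying 1000 or more bound parameters
		// fails the test run immediately instead of only failing later on Oracle.
		return ProxyDataSourceBuilder
			.create(target)
			.beforeQuery(new BlockLargeNumbersOfParamsListener())
			.build();
	}
}
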


@@ -1864,11 +1864,11 @@ public class SearchBuilder implements ISearchBuilder {
return retVal;
}
private void doLoadPids(List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation, EntityManager entityManager, FhirContext context, IDao theDao,
Map<Long, Integer> position, Collection<Long> pids) {
private void doLoadPids(List<IBaseResource> theResourceListToPopulate, Set<Long> theIncludedPids, boolean theForHistoryOperation, EntityManager theEntityManager, FhirContext theContext, IDao theDao,
Map<Long, Integer> thePosition, Collection<Long> thePids) {
// -- get the resource from the searchView
Collection<ResourceSearchView> resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(pids);
Collection<ResourceSearchView> resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(thePids);
//-- preload all tags with tag definition if any
Map<Long, Collection<ResourceTag>> tagMap = getResourceTagMap(resourceSearchViewList);
@@ -1876,7 +1876,7 @@ public class SearchBuilder implements ISearchBuilder {
Long resourceId;
for (ResourceSearchView next : resourceSearchViewList) {
Class<? extends IBaseResource> resourceType = context.getResourceDefinition(next.getResourceType()).getImplementingClass();
Class<? extends IBaseResource> resourceType = theContext.getResourceDefinition(next.getResourceType()).getImplementingClass();
resourceId = next.getId();
@@ -1885,20 +1885,20 @@ public class SearchBuilder implements ISearchBuilder {
ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion());
continue;
}
Integer index = position.get(resourceId);
Integer index = thePosition.get(resourceId);
if (index == null) {
ourLog.warn("Got back unexpected resource PID {}", resourceId);
continue;
}
if (resource instanceof IResource) {
if (theRevIncludedPids.contains(resourceId)) {
if (theIncludedPids.contains(resourceId)) {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.INCLUDE);
} else {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.MATCH);
}
} else {
if (theRevIncludedPids.contains(resourceId)) {
if (theIncludedPids.contains(resourceId)) {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.INCLUDE.getCode());
} else {
ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.MATCH.getCode());
@@ -1947,8 +1947,10 @@ public class SearchBuilder implements ISearchBuilder {
return tagMap;
}
private static final int maxLoad = 800;
@Override
public void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation,
public void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theIncludedPids, boolean theForHistoryOperation,
EntityManager entityManager, FhirContext context, IDao theDao) {
if (theIncludePids.isEmpty()) {
ourLog.debug("The include pids are empty");
@@ -1971,13 +1973,12 @@ public class SearchBuilder implements ISearchBuilder {
* if it's lots of IDs. I suppose maybe we should be doing this as a join anyhow
* but this should work too. Sigh.
*/
int maxLoad = 800;
List<Long> pids = new ArrayList<>(theIncludePids);
for (int i = 0; i < pids.size(); i += maxLoad) {
int to = i + maxLoad;
to = Math.min(to, pids.size());
List<Long> pidsSubList = pids.subList(i, to);
doLoadPids(theResourceListToPopulate, theRevIncludedPids, theForHistoryOperation, entityManager, context, theDao, position, pidsSubList);
doLoadPids(theResourceListToPopulate, theIncludedPids, theForHistoryOperation, entityManager, context, theDao, position, pidsSubList);
}
}
@@ -2021,14 +2022,17 @@ public class SearchBuilder implements ISearchBuilder {
if (matchAll) {
String sql;
sql = "SELECT r FROM ResourceLink r WHERE r." + searchFieldName + " IN (:target_pids) ";
TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
q.setParameter("target_pids", nextRoundMatches);
List<ResourceLink> results = q.getResultList();
for (ResourceLink resourceLink : results) {
if (theReverseMode) {
pidsToInclude.add(resourceLink.getSourceResourcePid());
} else {
pidsToInclude.add(resourceLink.getTargetResourcePid());
List<Collection<Long>> partitions = partition(nextRoundMatches, maxLoad);
for (Collection<Long> nextPartition : partitions) {
TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
q.setParameter("target_pids", nextPartition);
List<ResourceLink> results = q.getResultList();
for (ResourceLink resourceLink : results) {
if (theReverseMode) {
pidsToInclude.add(resourceLink.getSourceResourcePid());
} else {
pidsToInclude.add(resourceLink.getTargetResourcePid());
}
}
}
} else {
@@ -2071,25 +2075,28 @@ public class SearchBuilder implements ISearchBuilder {
sql = "SELECT r FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids)";
}
TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
q.setParameter("src_path", nextPath);
q.setParameter("target_pids", nextRoundMatches);
if (targetResourceType != null) {
q.setParameter("target_resource_type", targetResourceType);
} else if (haveTargetTypesDefinedByParam) {
q.setParameter("target_resource_types", param.getTargets());
}
List<ResourceLink> results = q.getResultList();
for (ResourceLink resourceLink : results) {
if (theReverseMode) {
Long pid = resourceLink.getSourceResourcePid();
if (pid != null) {
pidsToInclude.add(pid);
}
} else {
Long pid = resourceLink.getTargetResourcePid();
if (pid != null) {
pidsToInclude.add(pid);
List<Collection<Long>> partitions = partition(nextRoundMatches, maxLoad);
for (Collection<Long> nextPartition : partitions) {
TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
q.setParameter("src_path", nextPath);
q.setParameter("target_pids", nextPartition);
if (targetResourceType != null) {
q.setParameter("target_resource_type", targetResourceType);
} else if (haveTargetTypesDefinedByParam) {
q.setParameter("target_resource_types", param.getTargets());
}
List<ResourceLink> results = q.getResultList();
for (ResourceLink resourceLink : results) {
if (theReverseMode) {
Long pid = resourceLink.getSourceResourcePid();
if (pid != null) {
pidsToInclude.add(pid);
}
} else {
Long pid = resourceLink.getTargetResourcePid();
if (pid != null) {
pidsToInclude.add(pid);
}
}
}
}
@@ -2117,6 +2124,30 @@ public class SearchBuilder implements ISearchBuilder {
return allAdded;
}
private List<Collection<Long>> partition(Collection<Long> theNextRoundMatches, int theMaxLoad) {
if (theNextRoundMatches.size() <= theMaxLoad) {
return Collections.singletonList(theNextRoundMatches);
} else {
List<Collection<Long>> retVal = new ArrayList<>();
Collection<Long> current = null;
for (Long next : theNextRoundMatches) {
if (current == null) {
current = new ArrayList<>(theMaxLoad);
retVal.add(current);
}
current.add(next);
if (current.size() >= theMaxLoad) {
current = null;
}
}
return retVal;
}
}
private void searchForIdsWithAndOr(@Nonnull SearchParameterMap theParams) {
myParams = theParams;
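
An aside on the technique above: the new partition(...) helper simply walks the match set and starts a new sub-list every 800 entries, so each IN (:target_pids) clause stays under the 1000-parameter limit described in the listener earlier in this commit. Here is a self-contained sketch of the same idea, with illustrative class and method names that are not part of HAPI FHIR:

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

public class PartitionSketch {

	// Split a PID collection into sub-collections of at most theMaxSize entries each
	static List<Collection<Long>> partition(Collection<Long> theInput, int theMaxSize) {
		if (theInput.size() <= theMaxSize) {
			return Collections.singletonList(theInput);
		}
		List<Collection<Long>> retVal = new ArrayList<>();
		Collection<Long> current = null;
		for (Long next : theInput) {
			if (current == null) {
				current = new ArrayList<>(theMaxSize);
				retVal.add(current);
			}
			current.add(next);
			if (current.size() >= theMaxSize) {
				current = null;
			}
		}
		return retVal;
	}

	public static void main(String[] args) {
		List<Long> pids = new ArrayList<>();
		for (long i = 0; i < 1200; i++) {
			pids.add(i);
		}
		// For 1200 PIDs and a chunk size of 800 this prints chunks of 800 and 400,
		// each small enough to bind into a single IN (...) clause
		for (Collection<Long> chunk : partition(pids, 800)) {
			System.out.println("chunk size: " + chunk.size());
		}
	}
}

A chunk size of 800 presumably leaves headroom below the hard limit for the handful of extra parameters some of these queries also bind, such as :src_path and :target_resource_type.
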


@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.springframework.context.annotation.Bean;
@@ -96,9 +97,10 @@ public class TestR4Config extends BaseJavaConfigR4 {
DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
// .logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
// .countQuery(new ThreadQueryCountHolder())
.beforeQuery(new BlockLargeNumbersOfParamsListener())
.countQuery(singleQueryCountHolder())
.build();


@@ -1,21 +1,22 @@
package ca.uhn.fhir.jpa.stresstest;
import ca.uhn.fhir.jpa.provider.dstu3.BaseResourceProviderDstu3Test;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Bundle.BundleType;
import org.hl7.fhir.dstu3.model.Bundle.HTTPVerb;
import org.hl7.fhir.dstu3.model.CodeableConcept;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -26,11 +27,12 @@ import java.util.UUID;
import static org.junit.Assert.*;
public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
public class StressTestR4Test extends BaseResourceProviderR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StressTestDstu3Test.class);
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StressTestR4Test.class);
private RequestValidatingInterceptor myRequestValidatingInterceptor;
@Override
@After
public void after() throws Exception {
super.after();
@@ -38,6 +40,7 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
ourRestServer.unregisterInterceptor(myRequestValidatingInterceptor);
}
@Override
@Before
public void before() throws Exception {
super.before();
@@ -48,6 +51,56 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
myRequestValidatingInterceptor.addValidatorModule(module);
}
@Test
public void testSearchWithLargeNumberOfIncludes() {
Bundle bundle = new Bundle();
DiagnosticReport dr = new DiagnosticReport();
dr.setId(IdType.newRandomUuid());
bundle.addEntry().setFullUrl(dr.getId()).setResource(dr).getRequest().setMethod(HTTPVerb.POST).setUrl("DiagnosticReport");
for (int i = 0; i < 1200; i++) {
Observation o = new Observation();
o.setId(IdType.newRandomUuid());
o.setStatus(Observation.ObservationStatus.FINAL);
bundle.addEntry().setFullUrl(o.getId()).setResource(o).getRequest().setMethod(HTTPVerb.POST).setUrl("Observation");
dr.addResult().setReference(o.getId());
if (i == 0) {
Observation o2 = new Observation();
o2.setId(IdType.newRandomUuid());
o2.setStatus(Observation.ObservationStatus.FINAL);
bundle.addEntry().setFullUrl(o2.getId()).setResource(o2).getRequest().setMethod(HTTPVerb.POST).setUrl("Observation");
o.addHasMember(new Reference(o2.getId()));
}
}
StopWatch sw = new StopWatch();
ourLog.info("Saving {} resources", bundle.getEntry().size());
mySystemDao.transaction(null, bundle);
ourLog.info("Saved {} resources in {}", bundle.getEntry().size(), sw.toString());
// Using _include=*
SearchParameterMap map = new SearchParameterMap();
map.addInclude(IBaseResource.INCLUDE_ALL.asRecursive());
map.setLoadSynchronous(true);
IBundleProvider results = myDiagnosticReportDao.search(map);
List<IBaseResource> resultsAndIncludes = results.getResources(0, 999999);
assertEquals(1202, resultsAndIncludes.size());
// Using focused includes
map = new SearchParameterMap();
map.addInclude(DiagnosticReport.INCLUDE_RESULT.asRecursive());
map.addInclude(Observation.INCLUDE_HAS_MEMBER.asRecursive());
map.setLoadSynchronous(true);
results = myDiagnosticReportDao.search(map);
resultsAndIncludes = results.getResources(0, 999999);
assertEquals(1202, resultsAndIncludes.size());
}
@Test
public void testMultithreadedSearch() throws Exception {
Bundle input = new Bundle();
@@ -213,7 +266,7 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
try {
Patient p = new Patient();
p.addIdentifier().setSystem("http://test").setValue("BAR").setType(new CodeableConcept().addCoding(new Coding().setSystem("http://foo").setCode("bar")));
p.setGender(org.hl7.fhir.dstu3.model.Enumerations.AdministrativeGender.MALE);
p.setGender(org.hl7.fhir.r4.model.Enumerations.AdministrativeGender.MALE);
ourClient.create().resource(p).execute();
ourSearchParamRegistry.forceRefresh();


@@ -222,6 +222,11 @@
FHIR Servers now support the HTTP HEAD method for FHIR read operations. Thanks
to GitHub user Cory00 for the pull request!
</action>
<action type="fix">
When running the JPA server on Oracle, search queries that returned a very large number of
_include resources could fail with an SQL exception stating that too many parameters were used.
The search include logic has been reworked to load these resources in smaller chunks, which
avoids this limit.
</action>
</release>
<release version="3.6.0" date="2018-11-12" description="Food">
<action type="add">