Avoid search failure on Oracle when performing very large includes

James Agnew 2019-01-04 09:11:11 -05:00
parent 31b8a392b7
commit 7ba07d9f02
5 changed files with 177 additions and 49 deletions

View File

@@ -0,0 +1,37 @@
package ca.uhn.fhir.jpa.config;

import net.ttddyy.dsproxy.ExecutionInfo;
import net.ttddyy.dsproxy.QueryInfo;
import net.ttddyy.dsproxy.proxy.ParameterSetOperation;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;

/**
 * What's going on here:
 * <p>
 * Oracle chokes on queries that have more than 1000 parameters. We have stress tests that
 * issue giant queries, so this listener is here to trigger a failure whenever such a query
 * binds enough parameters that it would break on Oracle. Derby does not enforce this limit
 * by default, which is why we simulate the failure using this listener.
 * </p>
 */
public class BlockLargeNumbersOfParamsListener implements ProxyDataSourceBuilder.SingleQueryExecution {

   private static final Logger ourLog = LoggerFactory.getLogger(BlockLargeNumbersOfParamsListener.class);

   @Override
   public void execute(ExecutionInfo theExecInfo, List<QueryInfo> theQueryInfoList) {
      ourLog.trace("Query with {} queries", theQueryInfoList.size());
      for (QueryInfo next : theQueryInfoList) {
         ourLog.trace("Have {} param lists", next.getParametersList().size());
         for (List<ParameterSetOperation> nextParamsList : next.getParametersList()) {
            ourLog.trace("Have {} sub-param lists", nextParamsList.size());
            Validate.isTrue(nextParamsList.size() < 1000, "Query has %s parameters: %s", nextParamsList.size(), next.getQuery());
         }
      }
   }

}
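
For context, the limit this listener simulates is Oracle's cap of roughly 1000 expressions per IN list: when a JPA collection parameter is expanded, each element becomes one bound parameter, so an include query over more PIDs than that fails on Oracle. The sketch below is a hypothetical illustration only, not part of this commit; the entity import, the field name myTargetResourcePid, and the 1200-element list are assumptions for the example:

import java.util.ArrayList;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;

import ca.uhn.fhir.jpa.entity.ResourceLink;

public class HugeInClauseExample {

   // theEntityManager: any JPA EntityManager wired to the HAPI JPA entities.
   static List<ResourceLink> loadLinks(EntityManager theEntityManager) {
      // Build 1200 PIDs - more than Oracle will accept in one IN list.
      List<Long> pids = new ArrayList<>();
      for (long i = 0; i < 1200; i++) {
         pids.add(i);
      }
      TypedQuery<ResourceLink> q = theEntityManager.createQuery(
         "SELECT r FROM ResourceLink r WHERE r.myTargetResourcePid IN (:target_pids)", ResourceLink.class);
      // One bind parameter per element after collection expansion - this is the
      // situation BlockLargeNumbersOfParamsListener rejects.
      q.setParameter("target_pids", pids);
      return q.getResultList();
   }
}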

View File

@@ -1864,11 +1864,11 @@ public class SearchBuilder implements ISearchBuilder {
      return retVal;
   }

-  private void doLoadPids(List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation, EntityManager entityManager, FhirContext context, IDao theDao,
-                          Map<Long, Integer> position, Collection<Long> pids) {
+  private void doLoadPids(List<IBaseResource> theResourceListToPopulate, Set<Long> theIncludedPids, boolean theForHistoryOperation, EntityManager theEntityManager, FhirContext theContext, IDao theDao,
+                          Map<Long, Integer> thePosition, Collection<Long> thePids) {
      // -- get the resource from the searchView
-     Collection<ResourceSearchView> resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(pids);
+     Collection<ResourceSearchView> resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(thePids);
      //-- preload all tags with tag definition if any
      Map<Long, Collection<ResourceTag>> tagMap = getResourceTagMap(resourceSearchViewList);
@@ -1876,7 +1876,7 @@ public class SearchBuilder implements ISearchBuilder {
      Long resourceId;
      for (ResourceSearchView next : resourceSearchViewList) {
-        Class<? extends IBaseResource> resourceType = context.getResourceDefinition(next.getResourceType()).getImplementingClass();
+        Class<? extends IBaseResource> resourceType = theContext.getResourceDefinition(next.getResourceType()).getImplementingClass();
         resourceId = next.getId();
@@ -1885,20 +1885,20 @@ public class SearchBuilder implements ISearchBuilder {
            ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion());
            continue;
         }
-        Integer index = position.get(resourceId);
+        Integer index = thePosition.get(resourceId);
         if (index == null) {
            ourLog.warn("Got back unexpected resource PID {}", resourceId);
            continue;
         }
         if (resource instanceof IResource) {
-           if (theRevIncludedPids.contains(resourceId)) {
+           if (theIncludedPids.contains(resourceId)) {
               ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.INCLUDE);
            } else {
               ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.MATCH);
            }
         } else {
-           if (theRevIncludedPids.contains(resourceId)) {
+           if (theIncludedPids.contains(resourceId)) {
               ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.INCLUDE.getCode());
            } else {
               ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.MATCH.getCode());
@@ -1947,8 +1947,10 @@ public class SearchBuilder implements ISearchBuilder {
      return tagMap;
   }

+  private static final int maxLoad = 800;
+
   @Override
-  public void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation,
+  public void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theIncludedPids, boolean theForHistoryOperation,
                                  EntityManager entityManager, FhirContext context, IDao theDao) {
      if (theIncludePids.isEmpty()) {
         ourLog.debug("The include pids are empty");
@@ -1971,13 +1973,12 @@ public class SearchBuilder implements ISearchBuilder {
       * if it's lots of IDs. I suppose maybe we should be doing this as a join anyhow
       * but this should work too. Sigh.
       */
-     int maxLoad = 800;
      List<Long> pids = new ArrayList<>(theIncludePids);
      for (int i = 0; i < pids.size(); i += maxLoad) {
         int to = i + maxLoad;
         to = Math.min(to, pids.size());
         List<Long> pidsSubList = pids.subList(i, to);
-        doLoadPids(theResourceListToPopulate, theRevIncludedPids, theForHistoryOperation, entityManager, context, theDao, position, pidsSubList);
+        doLoadPids(theResourceListToPopulate, theIncludedPids, theForHistoryOperation, entityManager, context, theDao, position, pidsSubList);
      }
   }
@@ -2021,14 +2022,17 @@ public class SearchBuilder implements ISearchBuilder {
            if (matchAll) {
               String sql;
               sql = "SELECT r FROM ResourceLink r WHERE r." + searchFieldName + " IN (:target_pids) ";
-              TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
-              q.setParameter("target_pids", nextRoundMatches);
-              List<ResourceLink> results = q.getResultList();
-              for (ResourceLink resourceLink : results) {
-                 if (theReverseMode) {
-                    pidsToInclude.add(resourceLink.getSourceResourcePid());
-                 } else {
-                    pidsToInclude.add(resourceLink.getTargetResourcePid());
+              List<Collection<Long>> partitions = partition(nextRoundMatches, maxLoad);
+              for (Collection<Long> nextPartition : partitions) {
+                 TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
+                 q.setParameter("target_pids", nextPartition);
+                 List<ResourceLink> results = q.getResultList();
+                 for (ResourceLink resourceLink : results) {
+                    if (theReverseMode) {
+                       pidsToInclude.add(resourceLink.getSourceResourcePid());
+                    } else {
+                       pidsToInclude.add(resourceLink.getTargetResourcePid());
+                    }
                  }
               }
            } else {
@@ -2071,25 +2075,28 @@ public class SearchBuilder implements ISearchBuilder {
                  sql = "SELECT r FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids)";
               }
-              TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
-              q.setParameter("src_path", nextPath);
-              q.setParameter("target_pids", nextRoundMatches);
-              if (targetResourceType != null) {
-                 q.setParameter("target_resource_type", targetResourceType);
-              } else if (haveTargetTypesDefinedByParam) {
-                 q.setParameter("target_resource_types", param.getTargets());
-              }
-              List<ResourceLink> results = q.getResultList();
-              for (ResourceLink resourceLink : results) {
-                 if (theReverseMode) {
-                    Long pid = resourceLink.getSourceResourcePid();
-                    if (pid != null) {
-                       pidsToInclude.add(pid);
-                    }
-                 } else {
-                    Long pid = resourceLink.getTargetResourcePid();
-                    if (pid != null) {
-                       pidsToInclude.add(pid);
+              List<Collection<Long>> partitions = partition(nextRoundMatches, maxLoad);
+              for (Collection<Long> nextPartition : partitions) {
+                 TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
+                 q.setParameter("src_path", nextPath);
+                 q.setParameter("target_pids", nextPartition);
+                 if (targetResourceType != null) {
+                    q.setParameter("target_resource_type", targetResourceType);
+                 } else if (haveTargetTypesDefinedByParam) {
+                    q.setParameter("target_resource_types", param.getTargets());
+                 }
+                 List<ResourceLink> results = q.getResultList();
+                 for (ResourceLink resourceLink : results) {
+                    if (theReverseMode) {
+                       Long pid = resourceLink.getSourceResourcePid();
+                       if (pid != null) {
+                          pidsToInclude.add(pid);
+                       }
+                    } else {
+                       Long pid = resourceLink.getTargetResourcePid();
+                       if (pid != null) {
+                          pidsToInclude.add(pid);
+                       }
                     }
                  }
               }
@@ -2117,6 +2124,30 @@ public class SearchBuilder implements ISearchBuilder {
      return allAdded;
   }

+  private List<Collection<Long>> partition(Collection<Long> theNextRoundMatches, int theMaxLoad) {
+     if (theNextRoundMatches.size() <= theMaxLoad) {
+        return Collections.singletonList(theNextRoundMatches);
+     } else {
+        List<Collection<Long>> retVal = new ArrayList<>();
+        Collection<Long> current = null;
+        for (Long next : theNextRoundMatches) {
+           if (current == null) {
+              current = new ArrayList<>(theMaxLoad);
+              retVal.add(current);
+           }
+           current.add(next);
+           if (current.size() >= theMaxLoad) {
+              current = null;
+           }
+        }
+        return retVal;
+     }
+  }
+
   private void searchForIdsWithAndOr(@Nonnull SearchParameterMap theParams) {
      myParams = theParams;
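
The substance of the fix is in the hunks above: instead of binding every matched PID into a single IN (:target_pids) clause, the matches are now split into chunks of at most maxLoad (800) and one query is issued per chunk, keeping each statement under Oracle's limit. Below is a minimal standalone sketch of that chunking behaviour, using only the JDK; the class name, main method, and the 2000-element input are illustrative, not part of the commit:

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

public class PartitionSketch {

   // Same shape as the partition() helper added above: split a PID collection
   // into sub-collections of at most theMaxLoad elements, preserving order.
   static List<Collection<Long>> partition(Collection<Long> thePids, int theMaxLoad) {
      if (thePids.size() <= theMaxLoad) {
         return Collections.singletonList(thePids);
      }
      List<Collection<Long>> retVal = new ArrayList<>();
      Collection<Long> current = null;
      for (Long next : thePids) {
         if (current == null) {
            current = new ArrayList<>(theMaxLoad);
            retVal.add(current);
         }
         current.add(next);
         if (current.size() >= theMaxLoad) {
            current = null;
         }
      }
      return retVal;
   }

   public static void main(String[] args) {
      List<Long> pids = new ArrayList<>();
      for (long i = 0; i < 2000; i++) {
         pids.add(i);
      }
      // With maxLoad = 800 this prints 800, 800 and 400 - each chunk safely
      // below Oracle's roughly-1000-parameter limit.
      partition(pids, 800).forEach(chunk -> System.out.println(chunk.size()));
   }
}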

View File

@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
+import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.springframework.context.annotation.Bean;
@@ -96,9 +97,10 @@ public class TestR4Config extends BaseJavaConfigR4 {
      DataSource dataSource = ProxyDataSourceBuilder
         .create(retVal)
-        // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
+        .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
         // .logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
         // .countQuery(new ThreadQueryCountHolder())
+        .beforeQuery(new BlockLargeNumbersOfParamsListener())
         .countQuery(singleQueryCountHolder())
         .build();

View File

@@ -1,21 +1,22 @@
package ca.uhn.fhir.jpa.stresstest;
-import ca.uhn.fhir.jpa.provider.dstu3.BaseResourceProviderDstu3Test;
+import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
+import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
-import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
-import org.hl7.fhir.dstu3.model.Bundle;
-import org.hl7.fhir.dstu3.model.Bundle.BundleType;
-import org.hl7.fhir.dstu3.model.Bundle.HTTPVerb;
-import org.hl7.fhir.dstu3.model.CodeableConcept;
-import org.hl7.fhir.dstu3.model.Coding;
-import org.hl7.fhir.dstu3.model.Patient;
+import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
+import org.hl7.fhir.r4.model.*;
+import org.hl7.fhir.r4.model.Bundle.BundleType;
+import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
+import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -26,11 +27,12 @@ import java.util.UUID;
import static org.junit.Assert.*;

-public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
+public class StressTestR4Test extends BaseResourceProviderR4Test {

-  private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StressTestDstu3Test.class);
+  private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StressTestR4Test.class);
   private RequestValidatingInterceptor myRequestValidatingInterceptor;

+  @Override
   @After
   public void after() throws Exception {
      super.after();
@@ -38,6 +40,7 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
      ourRestServer.unregisterInterceptor(myRequestValidatingInterceptor);
   }

+  @Override
   @Before
   public void before() throws Exception {
      super.before();
@@ -48,6 +51,56 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
      myRequestValidatingInterceptor.addValidatorModule(module);
   }

+  @Test
+  public void testSearchWithLargeNumberOfIncludes() {
+
+     Bundle bundle = new Bundle();
+
+     DiagnosticReport dr = new DiagnosticReport();
+     dr.setId(IdType.newRandomUuid());
+     bundle.addEntry().setFullUrl(dr.getId()).setResource(dr).getRequest().setMethod(HTTPVerb.POST).setUrl("DiagnosticReport");
+
+     for (int i = 0; i < 1200; i++) {
+        Observation o = new Observation();
+        o.setId(IdType.newRandomUuid());
+        o.setStatus(Observation.ObservationStatus.FINAL);
+        bundle.addEntry().setFullUrl(o.getId()).setResource(o).getRequest().setMethod(HTTPVerb.POST).setUrl("Observation");
+        dr.addResult().setReference(o.getId());
+
+        if (i == 0) {
+           Observation o2 = new Observation();
+           o2.setId(IdType.newRandomUuid());
+           o2.setStatus(Observation.ObservationStatus.FINAL);
+           bundle.addEntry().setFullUrl(o2.getId()).setResource(o2).getRequest().setMethod(HTTPVerb.POST).setUrl("Observation");
+           o.addHasMember(new Reference(o2.getId()));
+        }
+     }
+
+     StopWatch sw = new StopWatch();
+     ourLog.info("Saving {} resources", bundle.getEntry().size());
+     mySystemDao.transaction(null, bundle);
+     ourLog.info("Saved {} resources in {}", bundle.getEntry().size(), sw.toString());
+
+     // Using _include=*
+     SearchParameterMap map = new SearchParameterMap();
+     map.addInclude(IBaseResource.INCLUDE_ALL.asRecursive());
+     map.setLoadSynchronous(true);
+     IBundleProvider results = myDiagnosticReportDao.search(map);
+     List<IBaseResource> resultsAndIncludes = results.getResources(0, 999999);
+     assertEquals(1202, resultsAndIncludes.size());
+
+     // Using focused includes
+     map = new SearchParameterMap();
+     map.addInclude(DiagnosticReport.INCLUDE_RESULT.asRecursive());
+     map.addInclude(Observation.INCLUDE_HAS_MEMBER.asRecursive());
+     map.setLoadSynchronous(true);
+     results = myDiagnosticReportDao.search(map);
+     resultsAndIncludes = results.getResources(0, 999999);
+     assertEquals(1202, resultsAndIncludes.size());
+  }
+
   @Test
   public void testMultithreadedSearch() throws Exception {
      Bundle input = new Bundle();
@@ -213,7 +266,7 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
      try {
         Patient p = new Patient();
         p.addIdentifier().setSystem("http://test").setValue("BAR").setType(new CodeableConcept().addCoding(new Coding().setSystem("http://foo").setCode("bar")));
-        p.setGender(org.hl7.fhir.dstu3.model.Enumerations.AdministrativeGender.MALE);
+        p.setGender(org.hl7.fhir.r4.model.Enumerations.AdministrativeGender.MALE);
         ourClient.create().resource(p).execute();
         ourSearchParamRegistry.forceRefresh();

View File

@@ -222,6 +222,11 @@
            FHIR Servers now support the HTTP HEAD method for FHIR read operations. Thanks
            to GitHub user Cory00 for the pull request!
         </action>
+        <action type="fix">
+           When running the JPA server on Oracle, certain search queries that return a very large number of
+           _included resources failed with an SQL exception stating that too many parameters were used. Search
+           include logic has been reworked to avoid this.
+        </action>
      </release>
      <release version="3.6.0" date="2018-11-12" description="Food">
         <action type="add">