Merge history table prefetch with resource table. (#5825)

This commit is contained in:
Michael Buckley 2024-04-04 19:53:35 -04:00 committed by GitHub
parent fcf0a85584
commit bdea4b6900
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 150 additions and 154 deletions

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 5824
jira: SMILE-7999
title: "We now avoid a query during reindex and transaction processing that was very slow on SQL Server."

View File

@ -28,7 +28,6 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService; import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
@ -38,7 +37,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory; import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder; import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.util.QueryChunker; import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.ResourceCountCache; import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.IBundleProvider;
@ -47,34 +46,31 @@ import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable; import jakarta.annotation.Nullable;
import jakarta.persistence.EntityManager; import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext; import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType; import jakarta.persistence.PersistenceContextType;
import jakarta.persistence.Query;
import jakarta.persistence.TypedQuery; import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.JoinType;
import jakarta.persistence.criteria.Predicate;
import jakarta.persistence.criteria.Root;
import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.Date; import java.util.Date;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.function.Predicate;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream;
public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends BaseStorageDao public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends BaseStorageDao
implements IFhirSystemDao<T, MT> { implements IFhirSystemDao<T, MT> {
public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0];
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class);
public ResourceCountCache myResourceCountsCache; public ResourceCountCache myResourceCountsCache;
@PersistenceContext(type = PersistenceContextType.TRANSACTION) @PersistenceContext(type = PersistenceContextType.TRANSACTION)
@ -95,9 +91,6 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
@Autowired @Autowired
private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory; private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
@Autowired
private IResourceTagDao myResourceTagDao;
@Autowired @Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster; private IInterceptorBroadcaster myInterceptorBroadcaster;
@ -181,13 +174,25 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
return myTransactionProcessor.transaction(theRequestDetails, theRequest, true); return myTransactionProcessor.transaction(theRequestDetails, theRequest, true);
} }
/**
* Prefetch entities into the Hibernate session.
*
* When processing several resources (e.g. transaction bundle, $reindex chunk, etc.)
* it would be slow to fetch each piece of a resource (e.g. all token index rows)
* one resource at a time.
* Instead, we fetch all the linked resources for the entire batch and populate the Hibernate Session.
*
* @param theResolvedIds the pids
* @param thePreFetchIndexes Should resource indexes be loaded
*/
@SuppressWarnings("rawtypes")
@Override @Override
public <P extends IResourcePersistentId> void preFetchResources( public <P extends IResourcePersistentId> void preFetchResources(
List<P> theResolvedIds, boolean thePreFetchIndexes) { List<P> theResolvedIds, boolean thePreFetchIndexes) {
HapiTransactionService.requireTransaction(); HapiTransactionService.requireTransaction();
List<Long> pids = theResolvedIds.stream().map(t -> ((JpaPid) t).getId()).collect(Collectors.toList()); List<Long> pids = theResolvedIds.stream().map(t -> ((JpaPid) t).getId()).collect(Collectors.toList());
new QueryChunker<Long>().chunk(pids, ids -> { new QueryChunker<Long>().chunk(pids, idChunk -> {
/* /*
* Pre-fetch the resources we're touching in this transaction in mass - this reduced the * Pre-fetch the resources we're touching in this transaction in mass - this reduced the
@ -200,122 +205,110 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
* *
* However, for realistic average workloads, this should reduce the number of round trips. * However, for realistic average workloads, this should reduce the number of round trips.
*/ */
if (ids.size() >= 2) { if (idChunk.size() >= 2) {
List<ResourceTable> loadedResourceTableEntries = new ArrayList<>(); List<ResourceTable> entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk);
new QueryChunker<Long>()
.chunk(
ids,
nextChunk ->
loadedResourceTableEntries.addAll(myResourceTableDao.findAllById(nextChunk)));
List<Long> entityIds;
if (thePreFetchIndexes) { if (thePreFetchIndexes) {
entityIds = loadedResourceTableEntries.stream()
.filter(ResourceTable::isParamsStringPopulated)
.map(ResourceTable::getId)
.collect(Collectors.toList());
if (entityIds.size() > 0) {
preFetchIndexes(entityIds, "string", "myParamsString", null);
}
entityIds = loadedResourceTableEntries.stream() prefetchByField("string", "myParamsString", ResourceTable::isParamsStringPopulated, entityChunk);
.filter(ResourceTable::isParamsTokenPopulated) prefetchByField("token", "myParamsToken", ResourceTable::isParamsTokenPopulated, entityChunk);
.map(ResourceTable::getId) prefetchByField("date", "myParamsDate", ResourceTable::isParamsDatePopulated, entityChunk);
.collect(Collectors.toList()); prefetchByField(
if (entityIds.size() > 0) { "quantity", "myParamsQuantity", ResourceTable::isParamsQuantityPopulated, entityChunk);
preFetchIndexes(entityIds, "token", "myParamsToken", null); prefetchByField("resourceLinks", "myResourceLinks", ResourceTable::isHasLinks, entityChunk);
}
entityIds = loadedResourceTableEntries.stream() prefetchByJoinClause(
.filter(ResourceTable::isParamsDatePopulated) "tags",
.map(ResourceTable::getId) // fetch the TagResources and the actual TagDefinitions
.collect(Collectors.toList()); "LEFT JOIN FETCH r.myTags t LEFT JOIN FETCH t.myTag",
if (entityIds.size() > 0) { BaseHasResource::isHasTags,
preFetchIndexes(entityIds, "date", "myParamsDate", null); entityChunk);
}
entityIds = loadedResourceTableEntries.stream()
.filter(ResourceTable::isParamsQuantityPopulated)
.map(ResourceTable::getId)
.collect(Collectors.toList());
if (entityIds.size() > 0) {
preFetchIndexes(entityIds, "quantity", "myParamsQuantity", null);
}
entityIds = loadedResourceTableEntries.stream()
.filter(ResourceTable::isHasLinks)
.map(ResourceTable::getId)
.collect(Collectors.toList());
if (entityIds.size() > 0) {
preFetchIndexes(entityIds, "resourceLinks", "myResourceLinks", null);
}
entityIds = loadedResourceTableEntries.stream()
.filter(BaseHasResource::isHasTags)
.map(ResourceTable::getId)
.collect(Collectors.toList());
if (entityIds.size() > 0) {
myResourceTagDao.findByResourceIds(entityIds);
preFetchIndexes(entityIds, "tags", "myTags", null);
}
entityIds = loadedResourceTableEntries.stream()
.map(ResourceTable::getId)
.collect(Collectors.toList());
if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.ENABLED) { if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.ENABLED) {
preFetchIndexes(entityIds, "searchParamPresence", "mySearchParamPresents", null); prefetchByField("searchParamPresence", "mySearchParamPresents", r -> true, entityChunk);
} }
} }
new QueryChunker<ResourceTable>()
.chunk(loadedResourceTableEntries, SearchBuilder.getMaximumPageSize() / 2, entries -> {
Map<Long, ResourceTable> entities =
entries.stream().collect(Collectors.toMap(ResourceTable::getId, t -> t));
CriteriaBuilder b = myEntityManager.getCriteriaBuilder();
CriteriaQuery<ResourceHistoryTable> q = b.createQuery(ResourceHistoryTable.class);
Root<ResourceHistoryTable> from = q.from(ResourceHistoryTable.class);
from.fetch("myProvenance", JoinType.LEFT);
List<Predicate> orPredicates = new ArrayList<>();
for (ResourceTable next : entries) {
Predicate resId = b.equal(from.get("myResourceId"), next.getId());
Predicate resVer = b.equal(from.get("myResourceVersion"), next.getVersion());
orPredicates.add(b.and(resId, resVer));
}
q.where(b.or(orPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
List<ResourceHistoryTable> resultList =
myEntityManager.createQuery(q).getResultList();
for (ResourceHistoryTable next : resultList) {
ResourceTable nextEntity = entities.get(next.getResourceId());
if (nextEntity != null) {
nextEntity.setCurrentVersionEntity(next);
}
}
});
} }
}); });
} }
private void preFetchIndexes( @Nonnull
List<Long> theIds, private List<ResourceTable> prefetchResourceTableHistoryAndProvenance(List<Long> idChunk) {
String typeDesc, assert idChunk.size() < SearchConstants.MAX_PAGE_SIZE : "assume pre-chunked";
String fieldName,
@Nullable List<ResourceTable> theEntityListToPopulate) { Query query = myEntityManager.createQuery("select r, h "
new QueryChunker<Long>().chunk(theIds, ids -> { + " FROM ResourceTable r "
TypedQuery<ResourceTable> query = myEntityManager.createQuery( + " LEFT JOIN fetch ResourceHistoryTable h "
"FROM ResourceTable r LEFT JOIN FETCH r." + fieldName + " WHERE r.myId IN ( :IDS )", + " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId "
ResourceTable.class); + " left join fetch h.myProvenance "
query.setParameter("IDS", ids); + " WHERE r.myId IN ( :IDS ) ");
List<ResourceTable> indexFetchOutcome = query.getResultList(); query.setParameter("IDS", idChunk);
ourLog.debug("Pre-fetched {} {}} indexes", indexFetchOutcome.size(), typeDesc);
if (theEntityListToPopulate != null) { @SuppressWarnings("unchecked")
theEntityListToPopulate.addAll(indexFetchOutcome); Stream<Object[]> queryResultStream = query.getResultStream();
} return queryResultStream
}); .map(nextPair -> {
// Store the matching ResourceHistoryTable in the transient slot on ResourceTable
ResourceTable result = (ResourceTable) nextPair[0];
ResourceHistoryTable currentVersion = (ResourceHistoryTable) nextPair[1];
result.setCurrentVersionEntity(currentVersion);
return result;
})
.collect(Collectors.toList());
}
/**
* Prefetch a join field for the active subset of some ResourceTable entities.
* Convenience wrapper around prefetchByJoinClause() for simple fields.
*
* @param theDescription for logging
* @param theJpaFieldName the join field from ResourceTable
* @param theEntityPredicate select which ResourceTable entities need this join
* @param theEntities the ResourceTable entities to consider
*/
private void prefetchByField(
String theDescription,
String theJpaFieldName,
Predicate<ResourceTable> theEntityPredicate,
List<ResourceTable> theEntities) {
String joinClause = "LEFT JOIN FETCH r." + theJpaFieldName;
prefetchByJoinClause(theDescription, joinClause, theEntityPredicate, theEntities);
}
/**
* Prefetch a join field for the active subset of some ResourceTable entities.
*
* @param theDescription for logging
* @param theJoinClause the JPA join expression to add to `ResourceTable r`
* @param theEntityPredicate selects which entities need this prefetch
* @param theEntities the ResourceTable entities to consider
*/
private void prefetchByJoinClause(
String theDescription,
String theJoinClause,
Predicate<ResourceTable> theEntityPredicate,
List<ResourceTable> theEntities) {
// Which entities need this prefetch?
List<Long> idSubset = theEntities.stream()
.filter(theEntityPredicate)
.map(ResourceTable::getId)
.collect(Collectors.toList());
if (idSubset.isEmpty()) {
// nothing to do
return;
}
String jqlQuery = "FROM ResourceTable r " + theJoinClause + " WHERE r.myId IN ( :IDS )";
TypedQuery<ResourceTable> query = myEntityManager.createQuery(jqlQuery, ResourceTable.class);
query.setParameter("IDS", idSubset);
List<ResourceTable> indexFetchOutcome = query.getResultList();
ourLog.debug("Pre-fetched {} {} indexes", indexFetchOutcome.size(), theDescription);
} }
@Nullable @Nullable

View File

@ -3,7 +3,6 @@ package ca.uhn.fhir.jpa.bulk.imprt2;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException; import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.jobs.imprt.ConsumeFilesStep; import ca.uhn.fhir.batch2.jobs.imprt.ConsumeFilesStep;
import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.r4.BasePartitioningR4Test; import ca.uhn.fhir.jpa.dao.r4.BasePartitioningR4Test;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.IdType;
@ -84,7 +83,7 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test {
// Validate // Validate
assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread(), myCaptureQueriesListener.getInsertQueriesForCurrentThread().stream().map(t->t.getSql(true, false)).collect(Collectors.joining("\n"))); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread(), myCaptureQueriesListener.getInsertQueriesForCurrentThread().stream().map(t->t.getSql(true, false)).collect(Collectors.joining("\n")));
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@ -145,9 +144,9 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test {
// Validate // Validate
if (partitionEnabled) { if (partitionEnabled) {
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
} else {
assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
} else {
assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
} }
assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());

View File

@ -149,7 +149,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
@Autowired @Autowired
private ISubscriptionTriggeringSvc mySubscriptionTriggeringSvc; private ISubscriptionTriggeringSvc mySubscriptionTriggeringSvc;
@Autowired @Autowired
private ResourceModifiedSubmitterSvc myResourceModifiedSubmitterSvc;; private ResourceModifiedSubmitterSvc myResourceModifiedSubmitterSvc;
@Autowired @Autowired
private ReindexStep myReindexStep; private ReindexStep myReindexStep;
@Autowired @Autowired
@ -827,7 +827,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
DeleteMethodOutcome outcome = myPatientDao.deleteByUrl("Patient?active=true", new SystemRequestDetails()); DeleteMethodOutcome outcome = myPatientDao.deleteByUrl("Patient?active=true", new SystemRequestDetails());
// Validate // Validate
assertEquals(13, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(12, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(10, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(10, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(30, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(30, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@ -1026,10 +1026,10 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
@ParameterizedTest @ParameterizedTest
@CsvSource({ @CsvSource({
// OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate // OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate
" false, CURRENT_VERSION, 2, 0", " false, CURRENT_VERSION, 1, 0",
" true, CURRENT_VERSION, 12, 0", " true, CURRENT_VERSION, 11, 0",
" false, ALL_VERSIONS, 12, 0", " false, ALL_VERSIONS, 11, 0",
" true, ALL_VERSIONS, 22, 0", " true, ALL_VERSIONS, 21, 0",
}) })
public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) { public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) {
// Setup // Setup
@ -1841,7 +1841,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
outcome = mySystemDao.transaction(mySrd, input.get()); outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueries(); myCaptureQueriesListener.logSelectQueries();
assertEquals(5, myCaptureQueriesListener.countSelectQueries()); assertEquals(4, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries(); myCaptureQueriesListener.logInsertQueries();
assertEquals(2, myCaptureQueriesListener.countInsertQueries()); assertEquals(2, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries(); myCaptureQueriesListener.logUpdateQueries();
@ -1857,7 +1857,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
outcome = mySystemDao.transaction(mySrd, input.get()); outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueries(); myCaptureQueriesListener.logSelectQueries();
assertEquals(5, myCaptureQueriesListener.countSelectQueries()); assertEquals(4, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries(); myCaptureQueriesListener.logInsertQueries();
assertEquals(2, myCaptureQueriesListener.countInsertQueries()); assertEquals(2, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries(); myCaptureQueriesListener.logUpdateQueries();
@ -1927,7 +1927,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
outcome = mySystemDao.transaction(mySrd, input.get()); outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueries(); myCaptureQueriesListener.logSelectQueries();
assertEquals(9, myCaptureQueriesListener.countSelectQueries()); assertEquals(7, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries(); myCaptureQueriesListener.logInsertQueries();
assertEquals(7, myCaptureQueriesListener.countInsertQueries()); assertEquals(7, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries(); myCaptureQueriesListener.logUpdateQueries();
@ -1943,7 +1943,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
outcome = mySystemDao.transaction(mySrd, input.get()); outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueries(); myCaptureQueriesListener.logSelectQueries();
assertEquals(7, myCaptureQueriesListener.countSelectQueries()); assertEquals(5, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries(); myCaptureQueriesListener.logInsertQueries();
assertEquals(5, myCaptureQueriesListener.countInsertQueries()); assertEquals(5, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries(); myCaptureQueriesListener.logUpdateQueries();
@ -2239,7 +2239,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
outcome = mySystemDao.transaction(mySrd, input.get()); outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueries(); myCaptureQueriesListener.logSelectQueries();
assertEquals(9, myCaptureQueriesListener.countSelectQueries()); assertEquals(8, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries(); myCaptureQueriesListener.logInsertQueries();
assertEquals(4, myCaptureQueriesListener.countInsertQueries()); assertEquals(4, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries(); myCaptureQueriesListener.logUpdateQueries();
@ -2256,7 +2256,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
outcome = mySystemDao.transaction(mySrd, input.get()); outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueries(); myCaptureQueriesListener.logSelectQueries();
assertEquals(8, myCaptureQueriesListener.countSelectQueries()); assertEquals(7, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries(); myCaptureQueriesListener.logInsertQueries();
assertEquals(4, myCaptureQueriesListener.countInsertQueries()); assertEquals(4, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries(); myCaptureQueriesListener.logUpdateQueries();
@ -2271,7 +2271,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
outcome = mySystemDao.transaction(mySrd, input.get()); outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueries(); myCaptureQueriesListener.logSelectQueries();
assertEquals(6, myCaptureQueriesListener.countSelectQueries()); assertEquals(5, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries(); myCaptureQueriesListener.logInsertQueries();
assertEquals(4, myCaptureQueriesListener.countInsertQueries()); assertEquals(4, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries(); myCaptureQueriesListener.logUpdateQueries();
@ -3365,7 +3365,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear(); myCaptureQueriesListener.clear();
Bundle outcome = mySystemDao.transaction(new SystemRequestDetails(), supplier.get()); Bundle outcome = mySystemDao.transaction(new SystemRequestDetails(), supplier.get());
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueries(); myCaptureQueriesListener.logInsertQueries();
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
@ -3388,7 +3388,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear(); myCaptureQueriesListener.clear();
outcome = mySystemDao.transaction(new SystemRequestDetails(), supplier.get()); outcome = mySystemDao.transaction(new SystemRequestDetails(), supplier.get());
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueries(); myCaptureQueriesListener.logInsertQueries();
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
@ -3449,7 +3449,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear(); myCaptureQueriesListener.clear();
mySystemDao.transaction(new SystemRequestDetails(), loadResourceFromClasspath(Bundle.class, "r4/transaction-perf-bundle-smallchanges.json")); mySystemDao.transaction(new SystemRequestDetails(), loadResourceFromClasspath(Bundle.class, "r4/transaction-perf-bundle-smallchanges.json"));
myCaptureQueriesListener.logSelectQueriesForCurrentThread(); myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(5, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(5, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

View File

@ -2842,7 +2842,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
outcome = mySystemDao.transaction(mySrd, input.get()); outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueriesForCurrentThread(); myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueriesForCurrentThread(); myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
@ -2859,7 +2859,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
outcome = mySystemDao.transaction(mySrd, input.get()); outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueriesForCurrentThread(); myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueriesForCurrentThread(); myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
@ -2874,7 +2874,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
outcome = mySystemDao.transaction(mySrd, input.get()); outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueriesForCurrentThread(); myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(5, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueriesForCurrentThread(); myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
@ -2924,7 +2924,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
output = mySystemDao.transaction(requestDetails, input); output = mySystemDao.transaction(requestDetails, input);
myCaptureQueriesListener.logSelectQueries(); myCaptureQueriesListener.logSelectQueries();
assertEquals(29, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(26, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

View File

@ -64,7 +64,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
// Verify // Verify
assertEquals(2, outcome.getRecordsProcessed()); assertEquals(2, outcome.getRecordsProcessed());
assertEquals(6, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(5, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries(); myCaptureQueriesListener.logUpdateQueries();
assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
@ -95,7 +95,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
// Verify // Verify
assertEquals(2, outcome.getRecordsProcessed()); assertEquals(2, outcome.getRecordsProcessed());
assertEquals(8, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(7, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
@ -128,7 +128,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
// Verify // Verify
assertEquals(2, outcome.getRecordsProcessed()); assertEquals(2, outcome.getRecordsProcessed());
assertEquals(6, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(5, myCaptureQueriesListener.logSelectQueries().size());
// name, family, phonetic, deceased, active // name, family, phonetic, deceased, active
assertEquals(5, myCaptureQueriesListener.countInsertQueries()); assertEquals(5, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
@ -196,7 +196,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
// Verify // Verify
assertEquals(2, outcome.getRecordsProcessed()); assertEquals(2, outcome.getRecordsProcessed());
assertEquals(10, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(9, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(4, myCaptureQueriesListener.countUpdateQueries()); assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
@ -241,7 +241,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
// Verify // Verify
assertEquals(4, outcome.getRecordsProcessed()); assertEquals(4, outcome.getRecordsProcessed());
assertEquals(9, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(8, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(5, myCaptureQueriesListener.countInsertQueries()); assertEquals(5, myCaptureQueriesListener.countInsertQueries());
assertEquals(2, myCaptureQueriesListener.countUpdateQueries()); assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

View File

@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.util.BundleBuilder; import ca.uhn.fhir.util.BundleBuilder;
import jakarta.annotation.Nonnull;
import org.hl7.fhir.r5.model.BooleanType; import org.hl7.fhir.r5.model.BooleanType;
import org.hl7.fhir.r5.model.Bundle; import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.CodeType; import org.hl7.fhir.r5.model.CodeType;
@ -20,7 +21,6 @@ import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource; import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.ValueSource; import org.junit.jupiter.params.provider.ValueSource;
import jakarta.annotation.Nonnull;
import java.io.IOException; import java.io.IOException;
import java.util.UUID; import java.util.UUID;
@ -28,7 +28,6 @@ import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.in;
import static org.hamcrest.Matchers.matchesPattern; import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals;
@ -151,7 +150,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
// Verify // Verify
assertEquals(theMatchUrlCacheEnabled ? 4 : 5, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(theMatchUrlCacheEnabled ? 3 : 4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@ -203,7 +202,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
// Verify // Verify
assertEquals(theMatchUrlCacheEnabled ? 4 : 5, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(theMatchUrlCacheEnabled ? 3 : 4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

View File

@ -401,7 +401,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test {
// 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference // 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference
myCaptureQueriesListener.logSelectQueriesForCurrentThread(); myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(10, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
// Verify correct indexes are written // Verify correct indexes are written
@ -441,7 +441,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test {
bb.addTransactionUpdateEntry(newEncounter(ENCOUNTER_E2, p2Id)); bb.addTransactionUpdateEntry(newEncounter(ENCOUNTER_E2, p2Id));
bb.addTransactionCreateEntry(newPatientP1_HomerSimpson().setId(p1Id)).conditional("identifier=http://system|200"); bb.addTransactionCreateEntry(newPatientP1_HomerSimpson().setId(p1Id)).conditional("identifier=http://system|200");
bb.addTransactionCreateEntry(newPatientP2_MargeSimpson().setId(p2Id)).conditional("identifier=http://system|300"); bb.addTransactionCreateEntry(newPatientP2_MargeSimpson().setId(p2Id)).conditional("identifier=http://system|300");
;
Bundle requestBundle = bb.getBundleTyped(); Bundle requestBundle = bb.getBundleTyped();
myCaptureQueriesListener.clear(); myCaptureQueriesListener.clear();
@ -496,7 +496,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test {
bb.addTransactionUpdateEntry(newEncounter(ENCOUNTER_E2, p2Id)); bb.addTransactionUpdateEntry(newEncounter(ENCOUNTER_E2, p2Id));
bb.addTransactionCreateEntry(new Patient().addIdentifier(new Identifier().setSystem("http://system").setValue("200")).setId(p1Id)).conditional("identifier=http://system|200"); bb.addTransactionCreateEntry(new Patient().addIdentifier(new Identifier().setSystem("http://system").setValue("200")).setId(p1Id)).conditional("identifier=http://system|200");
bb.addTransactionCreateEntry(new Patient().addIdentifier(new Identifier().setSystem("http://system").setValue("300")).setId(p2Id)).conditional("identifier=http://system|300"); bb.addTransactionCreateEntry(new Patient().addIdentifier(new Identifier().setSystem("http://system").setValue("300")).setId(p2Id)).conditional("identifier=http://system|300");
;
Bundle requestBundle = bb.getBundleTyped(); Bundle requestBundle = bb.getBundleTyped();
myCaptureQueriesListener.clear(); myCaptureQueriesListener.clear();