From bdea4b6900b51c85d4ffb9511ba6a04a3b42b91e Mon Sep 17 00:00:00 2001 From: Michael Buckley Date: Thu, 4 Apr 2024 19:53:35 -0400 Subject: [PATCH] Merge history table prefetch with resource table. (#5825) --- .../7_2_0/5824-fix-history-prefetch.yaml | 5 + .../fhir/jpa/dao/BaseHapiFhirSystemDao.java | 229 +++++++++--------- .../bulk/imprt2/ConsumeFilesStepR4Test.java | 7 +- .../r4/FhirResourceDaoR4QueryCountTest.java | 32 +-- .../jpa/dao/r4/PartitioningSqlR4Test.java | 8 +- .../uhn/fhir/jpa/reindex/ReindexStepTest.java | 10 +- .../r5/FhirSystemDaoTransactionR5Test.java | 7 +- ...iftedRefchainsAndChainedSortingR5Test.java | 6 +- 8 files changed, 150 insertions(+), 154 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5824-fix-history-prefetch.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5824-fix-history-prefetch.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5824-fix-history-prefetch.yaml new file mode 100644 index 00000000000..f1a9dd7f3be --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/5824-fix-history-prefetch.yaml @@ -0,0 +1,5 @@ +--- +type: fix +issue: 5824 +jira: SMILE-7999 +title: "We now avoid a query during reindex and transaction processing that was very slow on Sql Server." diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java index 951045de041..e1f7749801a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java @@ -28,7 +28,6 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; -import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.expunge.ExpungeService; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; @@ -38,7 +37,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory; -import ca.uhn.fhir.jpa.search.builder.SearchBuilder; +import ca.uhn.fhir.jpa.search.SearchConstants; import ca.uhn.fhir.jpa.util.QueryChunker; import ca.uhn.fhir.jpa.util.ResourceCountCache; import ca.uhn.fhir.rest.api.server.IBundleProvider; @@ -47,34 +46,31 @@ import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; import ca.uhn.fhir.util.StopWatch; import com.google.common.annotations.VisibleForTesting; +import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; import jakarta.persistence.PersistenceContextType; +import jakarta.persistence.Query; import jakarta.persistence.TypedQuery; -import jakarta.persistence.criteria.CriteriaBuilder; -import jakarta.persistence.criteria.CriteriaQuery; -import jakarta.persistence.criteria.JoinType; -import jakarta.persistence.criteria.Predicate; -import jakarta.persistence.criteria.Root; import org.hl7.fhir.instance.model.api.IBaseBundle; import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; -import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Predicate; import java.util.stream.Collectors; +import java.util.stream.Stream; public abstract class BaseHapiFhirSystemDao extends BaseStorageDao implements IFhirSystemDao { - - public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0]; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class); + public ResourceCountCache myResourceCountsCache; @PersistenceContext(type = PersistenceContextType.TRANSACTION) @@ -95,9 +91,6 @@ public abstract class BaseHapiFhirSystemDao extends B @Autowired private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory; - @Autowired - private IResourceTagDao myResourceTagDao; - @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; @@ -181,13 +174,25 @@ public abstract class BaseHapiFhirSystemDao extends B return myTransactionProcessor.transaction(theRequestDetails, theRequest, true); } + /** + * Prefetch entities into the Hibernate session. + * + * When processing several resources (e.g. transaction bundle, $reindex chunk, etc.) + * it would be slow to fetch each piece of a resource (e.g. all token index rows) + * one resource at a time. + * Instead, we fetch all the linked resources for the entire batch and populate the Hibernate Session. + * + * @param theResolvedIds the pids + * @param thePreFetchIndexes Should resource indexes be loaded + */ + @SuppressWarnings("rawtypes") @Override public
<P extends IResourcePersistentId>
void preFetchResources( List<P>
theResolvedIds, boolean thePreFetchIndexes) { HapiTransactionService.requireTransaction(); List pids = theResolvedIds.stream().map(t -> ((JpaPid) t).getId()).collect(Collectors.toList()); - new QueryChunker().chunk(pids, ids -> { + new QueryChunker().chunk(pids, idChunk -> { /* * Pre-fetch the resources we're touching in this transaction in mass - this reduced the @@ -200,122 +205,110 @@ public abstract class BaseHapiFhirSystemDao extends B * * However, for realistic average workloads, this should reduce the number of round trips. */ - if (ids.size() >= 2) { - List loadedResourceTableEntries = new ArrayList<>(); - - new QueryChunker() - .chunk( - ids, - nextChunk -> - loadedResourceTableEntries.addAll(myResourceTableDao.findAllById(nextChunk))); - - List entityIds; + if (idChunk.size() >= 2) { + List entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk); if (thePreFetchIndexes) { - entityIds = loadedResourceTableEntries.stream() - .filter(ResourceTable::isParamsStringPopulated) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - preFetchIndexes(entityIds, "string", "myParamsString", null); - } - entityIds = loadedResourceTableEntries.stream() - .filter(ResourceTable::isParamsTokenPopulated) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - preFetchIndexes(entityIds, "token", "myParamsToken", null); - } + prefetchByField("string", "myParamsString", ResourceTable::isParamsStringPopulated, entityChunk); + prefetchByField("token", "myParamsToken", ResourceTable::isParamsTokenPopulated, entityChunk); + prefetchByField("date", "myParamsDate", ResourceTable::isParamsDatePopulated, entityChunk); + prefetchByField( + "quantity", "myParamsQuantity", ResourceTable::isParamsQuantityPopulated, entityChunk); + prefetchByField("resourceLinks", "myResourceLinks", ResourceTable::isHasLinks, entityChunk); - entityIds = loadedResourceTableEntries.stream() - .filter(ResourceTable::isParamsDatePopulated) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - preFetchIndexes(entityIds, "date", "myParamsDate", null); - } + prefetchByJoinClause( + "tags", + // fetch the TagResources and the actual TagDefinitions + "LEFT JOIN FETCH r.myTags t LEFT JOIN FETCH t.myTag", + BaseHasResource::isHasTags, + entityChunk); - entityIds = loadedResourceTableEntries.stream() - .filter(ResourceTable::isParamsQuantityPopulated) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - preFetchIndexes(entityIds, "quantity", "myParamsQuantity", null); - } - - entityIds = loadedResourceTableEntries.stream() - .filter(ResourceTable::isHasLinks) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - preFetchIndexes(entityIds, "resourceLinks", "myResourceLinks", null); - } - - entityIds = loadedResourceTableEntries.stream() - .filter(BaseHasResource::isHasTags) - .map(ResourceTable::getId) - .collect(Collectors.toList()); - if (entityIds.size() > 0) { - myResourceTagDao.findByResourceIds(entityIds); - preFetchIndexes(entityIds, "tags", "myTags", null); - } - - entityIds = loadedResourceTableEntries.stream() - .map(ResourceTable::getId) - .collect(Collectors.toList()); if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.ENABLED) { - preFetchIndexes(entityIds, "searchParamPresence", "mySearchParamPresents", null); + prefetchByField("searchParamPresence", "mySearchParamPresents", r -> true, 
entityChunk); } } - - new QueryChunker() - .chunk(loadedResourceTableEntries, SearchBuilder.getMaximumPageSize() / 2, entries -> { - Map entities = - entries.stream().collect(Collectors.toMap(ResourceTable::getId, t -> t)); - - CriteriaBuilder b = myEntityManager.getCriteriaBuilder(); - CriteriaQuery q = b.createQuery(ResourceHistoryTable.class); - Root from = q.from(ResourceHistoryTable.class); - - from.fetch("myProvenance", JoinType.LEFT); - - List orPredicates = new ArrayList<>(); - for (ResourceTable next : entries) { - Predicate resId = b.equal(from.get("myResourceId"), next.getId()); - Predicate resVer = b.equal(from.get("myResourceVersion"), next.getVersion()); - orPredicates.add(b.and(resId, resVer)); - } - q.where(b.or(orPredicates.toArray(EMPTY_PREDICATE_ARRAY))); - List resultList = - myEntityManager.createQuery(q).getResultList(); - for (ResourceHistoryTable next : resultList) { - ResourceTable nextEntity = entities.get(next.getResourceId()); - if (nextEntity != null) { - nextEntity.setCurrentVersionEntity(next); - } - } - }); } }); } - private void preFetchIndexes( - List theIds, - String typeDesc, - String fieldName, - @Nullable List theEntityListToPopulate) { - new QueryChunker().chunk(theIds, ids -> { - TypedQuery query = myEntityManager.createQuery( - "FROM ResourceTable r LEFT JOIN FETCH r." + fieldName + " WHERE r.myId IN ( :IDS )", - ResourceTable.class); - query.setParameter("IDS", ids); - List indexFetchOutcome = query.getResultList(); - ourLog.debug("Pre-fetched {} {}} indexes", indexFetchOutcome.size(), typeDesc); - if (theEntityListToPopulate != null) { - theEntityListToPopulate.addAll(indexFetchOutcome); - } - }); + @Nonnull + private List prefetchResourceTableHistoryAndProvenance(List idChunk) { + assert idChunk.size() < SearchConstants.MAX_PAGE_SIZE : "assume pre-chunked"; + + Query query = myEntityManager.createQuery("select r, h " + + " FROM ResourceTable r " + + " LEFT JOIN fetch ResourceHistoryTable h " + + " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId " + + " left join fetch h.myProvenance " + + " WHERE r.myId IN ( :IDS ) "); + query.setParameter("IDS", idChunk); + + @SuppressWarnings("unchecked") + Stream queryResultStream = query.getResultStream(); + return queryResultStream + .map(nextPair -> { + // Store the matching ResourceHistoryTable in the transient slot on ResourceTable + ResourceTable result = (ResourceTable) nextPair[0]; + ResourceHistoryTable currentVersion = (ResourceHistoryTable) nextPair[1]; + result.setCurrentVersionEntity(currentVersion); + return result; + }) + .collect(Collectors.toList()); + } + + /** + * Prefetch a join field for the active subset of some ResourceTable entities. + * Convenience wrapper around prefetchByJoinClause() for simple fields. + * + * @param theDescription for logging + * @param theJpaFieldName the join field from ResourceTable + * @param theEntityPredicate select which ResourceTable entities need this join + * @param theEntities the ResourceTable entities to consider + */ + private void prefetchByField( + String theDescription, + String theJpaFieldName, + Predicate theEntityPredicate, + List theEntities) { + + String joinClause = "LEFT JOIN FETCH r." + theJpaFieldName; + + prefetchByJoinClause(theDescription, joinClause, theEntityPredicate, theEntities); + } + + /** + * Prefetch a join field for the active subset of some ResourceTable entities. 
+ * + * @param theDescription for logging + * @param theJoinClause the JPA join expression to add to `ResourceTable r` + * @param theEntityPredicate selects which entities need this prefetch + * @param theEntities the ResourceTable entities to consider + */ + private void prefetchByJoinClause( + String theDescription, + String theJoinClause, + Predicate theEntityPredicate, + List theEntities) { + + // Which entities need this prefetch? + List idSubset = theEntities.stream() + .filter(theEntityPredicate) + .map(ResourceTable::getId) + .collect(Collectors.toList()); + + if (idSubset.isEmpty()) { + // nothing to do + return; + } + + String jqlQuery = "FROM ResourceTable r " + theJoinClause + " WHERE r.myId IN ( :IDS )"; + + TypedQuery query = myEntityManager.createQuery(jqlQuery, ResourceTable.class); + query.setParameter("IDS", idSubset); + List indexFetchOutcome = query.getResultList(); + + ourLog.debug("Pre-fetched {} {} indexes", indexFetchOutcome.size(), theDescription); } @Nullable diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java index ee86c0596df..2076cf970fa 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java @@ -3,7 +3,6 @@ package ca.uhn.fhir.jpa.bulk.imprt2; import ca.uhn.fhir.batch2.api.JobExecutionFailedException; import ca.uhn.fhir.batch2.jobs.imprt.ConsumeFilesStep; import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.dao.r4.BasePartitioningR4Test; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.IdType; @@ -84,7 +83,7 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test { // Validate - assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread(), myCaptureQueriesListener.getInsertQueriesForCurrentThread().stream().map(t->t.getSql(true, false)).collect(Collectors.joining("\n"))); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -145,9 +144,9 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test { // Validate if (partitionEnabled) { - assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); - } else { assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + } else { + assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); } assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java index 1f299d970ef..abba1fbe50c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java @@ -149,7 
+149,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test @Autowired private ISubscriptionTriggeringSvc mySubscriptionTriggeringSvc; @Autowired - private ResourceModifiedSubmitterSvc myResourceModifiedSubmitterSvc;; + private ResourceModifiedSubmitterSvc myResourceModifiedSubmitterSvc; @Autowired private ReindexStep myReindexStep; @Autowired @@ -827,7 +827,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test DeleteMethodOutcome outcome = myPatientDao.deleteByUrl("Patient?active=true", new SystemRequestDetails()); // Validate - assertEquals(13, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(12, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(10, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(30, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -1026,10 +1026,10 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test @ParameterizedTest @CsvSource({ // OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate - " false, CURRENT_VERSION, 2, 0", - " true, CURRENT_VERSION, 12, 0", - " false, ALL_VERSIONS, 12, 0", - " true, ALL_VERSIONS, 22, 0", + " false, CURRENT_VERSION, 1, 0", + " true, CURRENT_VERSION, 11, 0", + " false, ALL_VERSIONS, 11, 0", + " true, ALL_VERSIONS, 21, 0", }) public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) { // Setup @@ -1841,7 +1841,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(5, myCaptureQueriesListener.countSelectQueries()); + assertEquals(4, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(2, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -1857,7 +1857,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(5, myCaptureQueriesListener.countSelectQueries()); + assertEquals(4, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(2, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -1927,7 +1927,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(9, myCaptureQueriesListener.countSelectQueries()); + assertEquals(7, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(7, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -1943,7 +1943,7 @@ public class FhirResourceDaoR4QueryCountTest extends 
BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(7, myCaptureQueriesListener.countSelectQueries()); + assertEquals(5, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(5, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -2239,7 +2239,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(9, myCaptureQueriesListener.countSelectQueries()); + assertEquals(8, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -2256,7 +2256,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(8, myCaptureQueriesListener.countSelectQueries()); + assertEquals(7, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -2271,7 +2271,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueries(); - assertEquals(6, myCaptureQueriesListener.countSelectQueries()); + assertEquals(5, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); @@ -3365,7 +3365,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); Bundle outcome = mySystemDao.transaction(new SystemRequestDetails(), supplier.get()); - assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); @@ -3388,7 +3388,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); outcome = mySystemDao.transaction(new SystemRequestDetails(), supplier.get()); - assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); @@ -3449,7 +3449,7 @@ public class 
FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); mySystemDao.transaction(new SystemRequestDetails(), loadResourceFromClasspath(Bundle.class, "r4/transaction-perf-bundle-smallchanges.json")); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(5, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java index 3899c13939f..07c474b0ff0 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java @@ -2842,7 +2842,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueriesForCurrentThread(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); @@ -2859,7 +2859,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueriesForCurrentThread(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); @@ -2874,7 +2874,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { outcome = mySystemDao.transaction(mySrd, input.get()); ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(5, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueriesForCurrentThread(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); @@ -2924,7 +2924,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { output = mySystemDao.transaction(requestDetails, input); myCaptureQueriesListener.logSelectQueries(); - assertEquals(29, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(26, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); 
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java index 81ad6c198c7..c619cc0b410 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java @@ -64,7 +64,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(6, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(5, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(0, myCaptureQueriesListener.countInsertQueries()); myCaptureQueriesListener.logUpdateQueries(); assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); @@ -95,7 +95,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(8, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(7, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); @@ -128,7 +128,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(6, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(5, myCaptureQueriesListener.logSelectQueries().size()); // name, family, phonetic, deceased, active assertEquals(5, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); @@ -196,7 +196,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(10, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(9, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(4, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); @@ -241,7 +241,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(4, outcome.getRecordsProcessed()); - assertEquals(9, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(8, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(5, myCaptureQueriesListener.countInsertQueries()); assertEquals(2, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java index f5d7992b1fc..2fe8d2762ca 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java @@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import 
ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.util.BundleBuilder; +import jakarta.annotation.Nonnull; import org.hl7.fhir.r5.model.BooleanType; import org.hl7.fhir.r5.model.Bundle; import org.hl7.fhir.r5.model.CodeType; @@ -20,7 +21,6 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; import org.junit.jupiter.params.provider.ValueSource; -import jakarta.annotation.Nonnull; import java.io.IOException; import java.util.UUID; @@ -28,7 +28,6 @@ import static org.apache.commons.lang3.StringUtils.countMatches; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.matchesPattern; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; @@ -151,7 +150,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { // Verify - assertEquals(theMatchUrlCacheEnabled ? 4 : 5, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(theMatchUrlCacheEnabled ? 3 : 4, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -203,7 +202,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { // Verify - assertEquals(theMatchUrlCacheEnabled ? 4 : 5, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(theMatchUrlCacheEnabled ? 
3 : 4, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java index 453b541eebb..5cf0e2a4127 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java @@ -401,7 +401,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { // 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(10, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); // Verify correct indexes are written @@ -441,7 +441,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { bb.addTransactionUpdateEntry(newEncounter(ENCOUNTER_E2, p2Id)); bb.addTransactionCreateEntry(newPatientP1_HomerSimpson().setId(p1Id)).conditional("identifier=http://system|200"); bb.addTransactionCreateEntry(newPatientP2_MargeSimpson().setId(p2Id)).conditional("identifier=http://system|300"); - ; + Bundle requestBundle = bb.getBundleTyped(); myCaptureQueriesListener.clear(); @@ -496,7 +496,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { bb.addTransactionUpdateEntry(newEncounter(ENCOUNTER_E2, p2Id)); bb.addTransactionCreateEntry(new Patient().addIdentifier(new Identifier().setSystem("http://system").setValue("200")).setId(p1Id)).conditional("identifier=http://system|200"); bb.addTransactionCreateEntry(new Patient().addIdentifier(new Identifier().setSystem("http://system").setValue("300")).setId(p2Id)).conditional("identifier=http://system|300"); - ; + Bundle requestBundle = bb.getBundleTyped(); myCaptureQueriesListener.clear();
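
For reference, the core of this change is the merged resource/history prefetch in BaseHapiFhirSystemDao above: a single JPQL query now loads the ResourceTable rows together with their matching ResourceHistoryTable row and its provenance, replacing the separate criteria query over (resourceId, version) pairs. Below is a minimal standalone sketch of that pattern; it is illustrative only - the PrefetchHistorySketch class and method names are hypothetical, while the entities, fields, and jakarta.persistence calls are the ones used in the patch.

// Illustrative sketch (not part of the patch): the merged resource + history + provenance
// prefetch introduced above, expressed as a standalone helper.
// Assumes a jakarta.persistence EntityManager with the HAPI JPA entities mapped.
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class PrefetchHistorySketch { // hypothetical class name, for illustration

	List<ResourceTable> prefetch(EntityManager theEntityManager, List<Long> theIdChunk) {
		// One query loads the ResourceTable rows, the current ResourceHistoryTable row for
		// each, and the history provenance, instead of a second criteria query per chunk.
		Query query = theEntityManager.createQuery("select r, h "
				+ " FROM ResourceTable r "
				+ " LEFT JOIN fetch ResourceHistoryTable h "
				+ "      on r.myVersion = h.myResourceVersion and r.id = h.myResourceId "
				+ " left join fetch h.myProvenance "
				+ " WHERE r.myId IN ( :IDS ) ");
		query.setParameter("IDS", theIdChunk);

		@SuppressWarnings("unchecked")
		Stream<Object[]> resultStream = query.getResultStream();
		return resultStream
				.map(nextPair -> {
					// Stash the matching history row in the transient slot on ResourceTable
					ResourceTable resource = (ResourceTable) nextPair[0];
					resource.setCurrentVersionEntity((ResourceHistoryTable) nextPair[1]);
					return resource;
				})
				.collect(Collectors.toList());
	}
}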
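
The search-index collections (string/token/date/quantity params, resource links, tags, search-param presence) are prefetched the same way: a plain LEFT JOIN FETCH per field, restricted to the subset of entities that actually have rows to load. A minimal sketch of that helper under the same assumptions follows (JoinFetchPrefetchSketch is a hypothetical name; the JPQL shape, predicates, and entity fields come from the patch).

// Illustrative sketch (not part of the patch): per-field join-fetch prefetch into the
// Hibernate session, so later lazy loads of those collections become no-ops.
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.TypedQuery;

import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;

class JoinFetchPrefetchSketch { // hypothetical class name, for illustration

	void prefetchByJoinClause(
			EntityManager theEntityManager,
			String theJoinClause,
			Predicate<ResourceTable> theEntityPredicate,
			List<ResourceTable> theEntities) {

		// Only issue the query for entities that actually have rows behind this join
		List<Long> idSubset = theEntities.stream()
				.filter(theEntityPredicate)
				.map(ResourceTable::getId)
				.collect(Collectors.toList());
		if (idSubset.isEmpty()) {
			return;
		}

		// e.g. theJoinClause = "LEFT JOIN FETCH r.myParamsToken"
		String jpql = "FROM ResourceTable r " + theJoinClause + " WHERE r.myId IN ( :IDS )";
		TypedQuery<ResourceTable> query = theEntityManager.createQuery(jpql, ResourceTable.class);
		query.setParameter("IDS", idSubset);
		// Executing the query is enough; the fetched collections stay in the session
		query.getResultList();
	}
}

In the patch this is invoked once per index type via the prefetchByField convenience wrapper, e.g. prefetchByField("token", "myParamsToken", ResourceTable::isParamsTokenPopulated, entityChunk).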