From 3554e9cf91ed516f0ad738750d3070cef475b07c Mon Sep 17 00:00:00 2001 From: Qingyixia <106992634+Qingyixia@users.noreply.github.com> Date: Thu, 23 Feb 2023 18:07:10 -0500 Subject: [PATCH 1/4] Fix the issue of missing meta.tags in subscription message (#4576) * Fix + Test * Addressing comments * Minor fix --- ...ing-meta-tags-in-subscription-message.yaml | 4 + .../ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java | 8 ++ .../message/MessageSubscriptionR4Test.java | 74 ++++++++++++++++--- 3 files changed, 74 insertions(+), 12 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4575-fix-missing-meta-tags-in-subscription-message.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4575-fix-missing-meta-tags-in-subscription-message.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4575-fix-missing-meta-tags-in-subscription-message.yaml new file mode 100644 index 00000000000..b146ddd2f2d --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4575-fix-missing-meta-tags-in-subscription-message.yaml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 4575 +title: "Previously, the full set of meta tags is not present in the payload of the subscription message when UPDATE the resource. Now, this issue has been fixed." diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java index 7daea6cec9d..c49c0bc4e06 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java @@ -770,6 +770,14 @@ public abstract class BaseHapiFhirDao extends BaseStora }); + // Update the resource to contain the old tags + allTagsOld.forEach(tag -> { + theResource.getMeta() + .addTag() + .setCode(tag.getTag().getCode()) + .setSystem(tag.getTag().getSystem()); + }); + theEntity.setHasTags(!allTagsNew.isEmpty()); return !allTagsOld.equals(allTagsNew); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/message/MessageSubscriptionR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/message/MessageSubscriptionR4Test.java index 3c31636f9f3..df71e873216 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/message/MessageSubscriptionR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/message/MessageSubscriptionR4Test.java @@ -1,6 +1,8 @@ package ca.uhn.fhir.jpa.subscription.message; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; +import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel; import ca.uhn.fhir.jpa.subscription.BaseSubscriptionsR4Test; import ca.uhn.fhir.jpa.subscription.channel.api.ChannelConsumerSettings; import ca.uhn.fhir.jpa.subscription.channel.api.IChannelReceiver; @@ -8,11 +10,11 @@ import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFact import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage; import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage; import ca.uhn.fhir.jpa.test.util.StoppableSubscriptionDeliveringRestHookSubscriber; -import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.r4.model.CodeableConcept; +import 
org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Coding; import org.hl7.fhir.r4.model.Observation; +import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.r4.model.Subscription; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -20,18 +22,19 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import org.junit.jupiter.params.provider.ValueSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import java.util.List; +import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.awaitility.Awaitility.await; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test the rest-hook subscriptions @@ -50,7 +53,9 @@ public class MessageSubscriptionR4Test extends BaseSubscriptionsR4Test { myStoppableSubscriptionDeliveringRestHookSubscriber.setCountDownLatch(null); myStoppableSubscriptionDeliveringRestHookSubscriber.unPause(); myStorageSettings.setTriggerSubscriptionsForNonVersioningChanges(new JpaStorageSettings().isTriggerSubscriptionsForNonVersioningChanges()); + myStorageSettings.setTagStorageMode(new JpaStorageSettings().getTagStorageMode()); } + @BeforeEach public void beforeRegisterRestHookListener() { mySubscriptionTestUtil.registerMessageInterceptor(); @@ -60,11 +65,11 @@ public class MessageSubscriptionR4Test extends BaseSubscriptionsR4Test { receiver.subscribe(handler); } - private Subscription createObservationSubscription() { + private Subscription createSubscriptionWithCriteria(String theCriteria) { Subscription subscription = new Subscription(); subscription.setReason("Monitor new neonatal function (note, age will be determined by the monitor)"); subscription.setStatus(Subscription.SubscriptionStatus.REQUESTED); - subscription.setCriteria("[Observation]"); + subscription.setCriteria(theCriteria); Subscription.SubscriptionChannelComponent channel = subscription.getChannel(); channel.setType(Subscription.SubscriptionChannelType.MESSAGE); @@ -92,7 +97,7 @@ public class MessageSubscriptionR4Test extends BaseSubscriptionsR4Test { @MethodSource("sourceTypes") public void testCreateUpdateAndPatchRetainCorrectSourceThroughDelivery(JpaStorageSettings.StoreMetaSourceInformationEnum theStorageStyle, String theExplicitSource, String theRequestId, String theExpectedSourceValue) throws Exception { myStorageSettings.setStoreMetaSourceInformation(theStorageStyle); - createObservationSubscription(); + createSubscriptionWithCriteria("[Observation]"); waitForActivatedSubscriptionCount(1); @@ -106,19 +111,64 @@ public class MessageSubscriptionR4Test extends BaseSubscriptionsR4Test { waitForQueueToDrain(); //Should receive at our queue receiver - Observation receivedObs = fetchSingleObservationFromSubscriptionTerminalEndpoint(); + IBaseResource resource = fetchSingleResourceFromSubscriptionTerminalEndpoint(); + assertThat(resource, instanceOf(Observation.class)); + Observation receivedObs = (Observation) resource; assertThat(receivedObs.getMeta().getSource(), is(equalTo(theExpectedSourceValue))); } - private Observation 
fetchSingleObservationFromSubscriptionTerminalEndpoint() { + @Test + public void testUpdateResourceRetainCorrectMetaTagsThroughDelivery() throws Exception { + myStorageSettings.setTagStorageMode(JpaStorageSettings.TagStorageModeEnum.NON_VERSIONED); + createSubscriptionWithCriteria("[Patient]"); + + waitForActivatedSubscriptionCount(1); + + // Create Patient with two meta tags + Patient patient = new Patient(); + patient.setActive(true); + patient.getMeta().addTag().setSystem("http://www.example.com/tags").setCode("tag-1"); + patient.getMeta().addTag().setSystem("http://www.example.com/tags").setCode("tag-2"); + + IIdType id = myClient.create().resource(patient).execute().getId(); + + // Should see 1 subscription notification for CREATE + waitForQueueToDrain(); + + // Should receive two meta tags + IBaseResource resource = fetchSingleResourceFromSubscriptionTerminalEndpoint(); + assertThat(resource, instanceOf(Patient.class)); + Patient receivedPatient = (Patient) resource; + assertThat(receivedPatient.getMeta().getTag().size(), is(equalTo(2))); + + // Update the previous Patient and add one more tag + patient = new Patient(); + patient.setId(id); + patient.setActive(true); + patient.getMeta().getTag().add(new Coding().setSystem("http://www.example.com/tags").setCode("tag-3")); + myClient.update().resource(patient).execute(); + + waitForQueueToDrain(); + + // Should receive all three meta tags + List expected = List.of("tag-1", "tag-2", "tag-3"); + resource = fetchSingleResourceFromSubscriptionTerminalEndpoint(); + receivedPatient = (Patient) resource; + List receivedTagList = receivedPatient.getMeta().getTag(); + ourLog.info(getFhirContext().newJsonParser().setPrettyPrint(true).encodeResourceToString(receivedPatient)); + assertThat(receivedTagList.size(), is(equalTo(3))); + List actual = receivedTagList.stream().map(t -> t.getCode()).sorted().collect(Collectors.toList()); + assertTrue(expected.equals(actual)); + } + + private IBaseResource fetchSingleResourceFromSubscriptionTerminalEndpoint() { assertThat(handler.getMessages().size(), is(equalTo(1))); ResourceModifiedJsonMessage resourceModifiedJsonMessage = handler.getMessages().get(0); ResourceModifiedMessage payload = resourceModifiedJsonMessage.getPayload(); String payloadString = payload.getPayloadString(); IBaseResource resource = myFhirContext.newJsonParser().parseResource(payloadString); - Observation receivedObs = (Observation) resource; handler.clearMessages(); - return receivedObs; + return resource; } } From b25f364369c8b6d795c4f384fafd02209668677c Mon Sep 17 00:00:00 2001 From: Kevin Dougan SmileCDR <72025369+KevinDougan-SmileCDR@users.noreply.github.com> Date: Fri, 24 Feb 2023 14:05:07 -0500 Subject: [PATCH 2/4] Fix - POST Bundle With ifNoneExist Clause Incorrectly Rejected With Invalid Match URL ERROR (#4588) * Add a failing Unit Test and a changelog. * Fixed the regex to accept the dash character. 
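A minimal standalone sketch of the behaviour this fixes (illustrative only; the demo class name is invented, but the match URL is the one exercised by the new unit test): the old UNQUALIFIED_MATCH_URL_START pattern stopped at the first dash in the parameter name, so an ifNoneExist clause such as patient-first-identifier=... was rejected with an Invalid Match URL error, while the widened character class accepts it.

import java.util.regex.Pattern;

public class MatchUrlRegexDemo {
	public static void main(String[] args) {
		// Character class before this patch: no dash allowed in the parameter name
		Pattern oldPattern = Pattern.compile("^[a-zA-Z0-9_]+=");
		// Character class after this patch: dash accepted
		Pattern newPattern = Pattern.compile("^[a-zA-Z0-9_-]+=");

		String matchUrl = "patient-first-identifier=MRN%7C123456789";

		System.out.println(oldPattern.matcher(matchUrl).find()); // false - rejected as an invalid match URL
		System.out.println(newPattern.matcher(matchUrl).find()); // true - conditional create proceeds
	}
}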
--- .../4580-fix-basetransactionprocessor-matchurlregex.yaml | 4 ++++ .../fhir/storage/test/BaseTransactionProcessorTest.java | 8 ++++++++ .../ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java | 2 +- 3 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4580-fix-basetransactionprocessor-matchurlregex.yaml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4580-fix-basetransactionprocessor-matchurlregex.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4580-fix-basetransactionprocessor-matchurlregex.yaml new file mode 100644 index 00000000000..9adcf06c06d --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4580-fix-basetransactionprocessor-matchurlregex.yaml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 4580 +title: "Fix the BaseTransactionProcessor.UNQUALIFIED_MATCH_URL_START REGEX so that it correctly matches patterns that contain dashes in them." diff --git a/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/storage/test/BaseTransactionProcessorTest.java b/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/storage/test/BaseTransactionProcessorTest.java index abb20c6fab5..a7ce2524178 100644 --- a/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/storage/test/BaseTransactionProcessorTest.java +++ b/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/storage/test/BaseTransactionProcessorTest.java @@ -27,6 +27,7 @@ import org.hl7.fhir.r4.model.IdType; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class BaseTransactionProcessorTest { @@ -102,4 +103,11 @@ public class BaseTransactionProcessorTest { assertEquals(input, outcome); } + @Test + void testUnqualifiedMatchUrlStart_RegexPatternMatches() { + String matchUrl = "patient-first-identifier=MRN%7C123456789"; + boolean matchResult = BaseTransactionProcessor.UNQUALIFIED_MATCH_URL_START.matcher(matchUrl).find(); + assertTrue(matchResult, "Failed to find a Regex match using Url '" + matchUrl + "'"); + } + } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java index 43c1de97dab..ec4bc5ed248 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java @@ -137,7 +137,7 @@ public abstract class BaseTransactionProcessor { public static final String URN_PREFIX = "urn:"; public static final String URN_PREFIX_ESCAPED = UrlUtil.escapeUrlParam(URN_PREFIX); - public static final Pattern UNQUALIFIED_MATCH_URL_START = Pattern.compile("^[a-zA-Z0-9_]+="); + public static final Pattern UNQUALIFIED_MATCH_URL_START = Pattern.compile("^[a-zA-Z0-9_-]+="); public static final Pattern INVALID_PLACEHOLDER_PATTERN = Pattern.compile("[a-zA-Z]+:.*"); private static final Logger ourLog = LoggerFactory.getLogger(BaseTransactionProcessor.class); @Autowired From cedf69516b5ece527b8a6c941cacd4b7ac1e0857 Mon Sep 17 00:00:00 2001 From: michaelabuckley Date: Sun, 26 Feb 2023 22:12:12 -0500 Subject: [PATCH 3/4] Simultaneous $reindex and DELETE can orphan index rows (#4584) Add optimistic write lock to $reindex operation to prevent indexing stale data. 
Introduce Lockstep Phaser to make testing easier Add 1 query to every resource reindex :-( --- .../6_6_0/4584-reindex-orphan-index-rows.yaml | 4 + .../fhir/jpa/dao/BaseHapiFhirResourceDao.java | 13 +- .../extractor/BaseSearchParamExtractor.java | 1 + .../uhn/fhir/jpa/reindex/ReindexStepTest.java | 10 +- .../search/reindex/ReindexRaceBugTest.java | 192 ++++++++++++++++++ .../ca/uhn/fhir/jpa/test/BaseJpaR4Test.java | 1 + hapi-fhir-test-utilities/pom.xml | 14 +- .../test/concurrency/LockstepEnumPhaser.java | 104 ++++++++++ .../concurrency/LockstepEnumPhaserTest.java | 190 +++++++++++++++++ 9 files changed, 517 insertions(+), 12 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4584-reindex-orphan-index-rows.yaml create mode 100644 hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ReindexRaceBugTest.java create mode 100644 hapi-fhir-test-utilities/src/main/java/ca/uhn/test/concurrency/LockstepEnumPhaser.java create mode 100644 hapi-fhir-test-utilities/src/test/java/ca/uhn/test/concurrency/LockstepEnumPhaserTest.java diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4584-reindex-orphan-index-rows.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4584-reindex-orphan-index-rows.yaml new file mode 100644 index 00000000000..2f23b4319a5 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4584-reindex-orphan-index-rows.yaml @@ -0,0 +1,4 @@ +--- +type: fix +issue: 4548 +title: "Simultaneous DELETE and $reindex operations could corrupt the search index. This has been fixed." diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java index 3d6377dc85e..0642f9f2158 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java @@ -133,6 +133,7 @@ import org.springframework.transaction.support.TransactionTemplate; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.PostConstruct; +import javax.persistence.LockModeType; import javax.persistence.NoResultException; import javax.persistence.TypedQuery; import javax.servlet.http.HttpServletResponse; @@ -1222,13 +1223,19 @@ public abstract class BaseHapiFhirResourceDao extends B @Override public void reindex(IResourcePersistentId thePid, RequestDetails theRequest, TransactionDetails theTransactionDetails) { JpaPid jpaPid = (JpaPid) thePid; - Optional entityOpt = myResourceTableDao.findById(jpaPid.getId()); - if (!entityOpt.isPresent()) { + + // Careful! Reindex only reads ResourceTable, but we tell Hibernate to check version + // to ensure Hibernate will catch concurrent updates (PUT/DELETE) elsewhere. + // Otherwise, we may index stale data. See #4584 + // We use the main entity as the lock object since all the index rows hang off it. 
+ ResourceTable entity = + myEntityManager.find(ResourceTable.class, jpaPid.getId(), LockModeType.OPTIMISTIC); + + if (entity == null) { ourLog.warn("Unable to find entity with PID: {}", jpaPid.getId()); return; } - ResourceTable entity = entityOpt.get(); try { T resource = (T) myJpaStorageResourceParser.toResource(entity, false); reindex(resource, entity); diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java index e11d1b787d9..c3f3593cbde 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java @@ -1945,6 +1945,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor IPrimitiveType nextBaseDateTime = (IPrimitiveType) theValue; if (nextBaseDateTime.getValue() != null) { myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValueAsString()); + ourLog.trace("DateExtractor - extracted {} for {}", nextBaseDateTime, theSearchParam.getName()); theParams.add(myIndexedSearchParamDate); } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java index 5a23c780ba4..fb4091a8279 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java @@ -61,7 +61,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(4, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(6, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); @@ -91,7 +91,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(6, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(8, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); @@ -124,7 +124,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(4, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(6, myCaptureQueriesListener.logSelectQueries().size()); // name, family, phonetic, deceased, active assertEquals(5, myCaptureQueriesListener.countInsertQueries()); assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); @@ -192,7 +192,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(2, outcome.getRecordsProcessed()); - assertEquals(8, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(10, 
myCaptureQueriesListener.logSelectQueries().size()); assertEquals(0, myCaptureQueriesListener.countInsertQueries()); assertEquals(4, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); @@ -237,7 +237,7 @@ public class ReindexStepTest extends BaseJpaR4Test { // Verify assertEquals(4, outcome.getRecordsProcessed()); - assertEquals(5, myCaptureQueriesListener.logSelectQueries().size()); + assertEquals(9, myCaptureQueriesListener.logSelectQueries().size()); assertEquals(5, myCaptureQueriesListener.countInsertQueries()); assertEquals(2, myCaptureQueriesListener.countUpdateQueries()); assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ReindexRaceBugTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ReindexRaceBugTest.java new file mode 100644 index 00000000000..ca920975a30 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ReindexRaceBugTest.java @@ -0,0 +1,192 @@ +package ca.uhn.fhir.jpa.search.reindex; + +import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.test.BaseJpaR4Test; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; +import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; +import ca.uhn.fhir.storage.test.DaoTestDataBuilder; +import ca.uhn.test.concurrency.LockstepEnumPhaser; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r4.model.Observation; +import org.hl7.fhir.r4.model.SearchParameter; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.annotation.Propagation; + +import javax.annotation.Nonnull; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.ThreadFactory; +import java.util.function.BiFunction; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +@ContextConfiguration(classes = { + DaoTestDataBuilder.Config.class +}) +class ReindexRaceBugTest extends BaseJpaR4Test { + private static final Logger ourLog = LoggerFactory.getLogger(ReindexRaceBugTest.class); + @Autowired + HapiTransactionService myHapiTransactionService; + + enum Steps { + STARTING, RUN_REINDEX, RUN_DELETE, COMMIT_REINDEX, FINISHED + } + + /** + * Simulate reindex job step overlapping with resource deletion. + * The $reindex step processes several resources in a single tx. + * The tested sequence here is: job step $reindexes a resouce, then another thread DELETEs the resource, + * then later, the $reindex step finishes the rest of the resources and commits AFTER the DELETE commits. + * + * This was inserting new index rows into HFJ_SPIDX_TOKEN even though the resource was gone. + * This is an illegal state for our index. 
Deleted resources should never have content in HFJ_SPIDX_* + */ + @Test + void deleteOverlapsWithReindex_leavesIndexRowsP() throws InterruptedException, ExecutionException { + LockstepEnumPhaser phaser = new LockstepEnumPhaser<>(2, Steps.class); + + ourLog.info("An observation is created"); + + var observationCreateOutcome = callInFreshTx((tx, rd) -> { + Observation o = (Observation) buildResource("Observation", withEffectiveDate("2021-01-01T00:00:00")); + return myObservationDao.create(o, rd); + }); + IIdType observationId = observationCreateOutcome.getId().toVersionless(); + long observationPid = Long.parseLong(observationCreateOutcome.getId().getIdPart()); + + assertEquals(1, getSPIDXDateCount(observationPid), "date index row for date"); + + ourLog.info("Then a SP is created after that matches data in the Observation"); + SearchParameter sp = myFhirContext.newJsonParser().parseResource(SearchParameter.class, """ + { + "resourceType": "SearchParameter", + "id": "observation-date2", + "status": "active", + "code": "date2", + "name": "date2", + "base": [ "Observation" ], + "type": "date", + "expression": "Observation.effective" + } + """); + this.myStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(false); + callInFreshTx((tx, rd) -> { + DaoMethodOutcome result = mySearchParameterDao.update(sp, rd); + mySearchParamRegistry.forceRefresh(); + return result; + }); + + assertEquals(1, getSPIDXDateCount(observationPid), "still only one index row before reindex"); + + + // suppose reindex job step starts here and loads the resource and ResourceTable entity + ThreadFactory loggingThreadFactory = getLoggingThreadFactory("Reindex-thread"); + ExecutorService backgroundReindexThread = Executors.newSingleThreadExecutor(loggingThreadFactory); + Future backgroundResult = backgroundReindexThread.submit(() -> { + try { + callInFreshTx((tx, rd) -> { + try { + ourLog.info("Starting background $reindex"); + phaser.arriveAndAwaitSharedEndOf(Steps.STARTING); + + phaser.assertInPhase(Steps.RUN_REINDEX); + ourLog.info("Run $reindex"); + myObservationDao.reindex(JpaPid.fromIdAndResourceType(observationPid, "Observation"), rd, new TransactionDetails()); + + ourLog.info("$reindex done release main thread to delete"); + phaser.arriveAndAwaitSharedEndOf(Steps.RUN_REINDEX); + + ourLog.info("Wait for delete to finish"); + phaser.arriveAndAwaitSharedEndOf(Steps.RUN_DELETE); + + phaser.assertInPhase(Steps.COMMIT_REINDEX); + ourLog.info("Commit $reindex now that delete is finished"); + // commit happens here at end of block + } catch (Exception e) { + ourLog.error("$reindex thread failed", e); + } + return 0; + }); + } finally { + phaser.arriveAndAwaitSharedEndOf(Steps.COMMIT_REINDEX); + } + return 1; + }); + + ourLog.info("Wait for $reindex to start"); + phaser.arriveAndAwaitSharedEndOf(Steps.STARTING); + + phaser.arriveAndAwaitSharedEndOf(Steps.RUN_REINDEX); + + // then the resource is deleted + phaser.assertInPhase(Steps.RUN_DELETE); + + ourLog.info("Deleting observation"); + callInFreshTx((tx, rd) -> myObservationDao.delete(observationId, rd)); + assertResourceDeleted(observationId); + assertEquals(0, getSPIDXDateCount(observationPid), "A deleted resource should have 0 index rows"); + + ourLog.info("Let $reindex commit"); + phaser.arriveAndAwaitSharedEndOf(Steps.RUN_DELETE); + + // then the reindex call finishes + ourLog.info("Await $reindex commit"); + phaser.arriveAndAwaitSharedEndOf(Steps.COMMIT_REINDEX); + + assertEquals(0, getSPIDXDateCount(observationPid), "A deleted resource should still 
have 0 index rows, after $reindex completes"); + } + + @Nonnull + static ThreadFactory getLoggingThreadFactory(String theThreadName) { + ThreadFactory loggingThreadFactory = r -> new Thread(() -> { + boolean success = false; + try { + r.run(); + success = true; + } finally { + if (!success) { + ourLog.error("Background thread failed"); + } + } + }, theThreadName); + return loggingThreadFactory; + } + + void assertResourceDeleted(IIdType observationId) { + try { + // confirm deleted + callInFreshTx((tx, rd)-> + myObservationDao.read(observationId, rd, false)); + fail("Read deleted resource"); + } catch (ResourceGoneException e) { + // expected + } + } + + T callInFreshTx(BiFunction theCallback) { + SystemRequestDetails requestDetails = new SystemRequestDetails(); + return myHapiTransactionService.withRequest(requestDetails) + .withTransactionDetails(new TransactionDetails()) + .withPropagation(Propagation.REQUIRES_NEW) + .execute(tx-> theCallback.apply(tx, requestDetails)); + } + + + int getSPIDXDateCount(long observationPid) { + return callInFreshTx((rd, tx) -> + myResourceIndexedSearchParamDateDao.findAllForResourceId(observationPid).size()); + } + +} diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java index 717fac3d358..5a8567dad00 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java @@ -103,6 +103,7 @@ import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.server.BasePagingProvider; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; +import ca.uhn.fhir.storage.test.DaoTestDataBuilder; import ca.uhn.fhir.test.utilities.ITestDataBuilder; import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.validation.FhirValidator; diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index 2cbf65827a3..ab366500783 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -182,10 +182,16 @@ - - org.mockito - mockito-core - + + org.mockito + mockito-core + + + com.github.seregamorph + hamcrest-more-matchers + 0.1 + test + diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/concurrency/LockstepEnumPhaser.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/concurrency/LockstepEnumPhaser.java new file mode 100644 index 00000000000..fb97fcef2fb --- /dev/null +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/concurrency/LockstepEnumPhaser.java @@ -0,0 +1,104 @@ +package ca.uhn.test.concurrency; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.Phaser; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +/** + * Test helper to force a particular sequence on 2 or more threads. + * Wraps Phaser with an Enum for better messages, and some test support. + * The only use is to impose a particular execution sequence over multiple threads when reproducing bugs. + * + * The simplest usage is to declare the number of collaborators as theParticipantCount + * in the constructor, and then have each participant thread call {@link #arriveAndAwaitSharedEndOf} + * as they finish the work of every phase. + * Every thread needs to confirm, even if they do no work in that phase. + *

+ * Note: this is just a half-baked wrapper around Phaser. + * The behaviour is not especially precise, or tested. Comments welcome: MB. + * + * @param an enum used to name the phases. + */ +public class LockstepEnumPhaser> { + private static final Logger ourLog = LoggerFactory.getLogger(LockstepEnumPhaser.class); + final Phaser myPhaser; + final Class myEnumClass; + final E[] myEnumConstants; + + public LockstepEnumPhaser(int theParticipantCount, Class theEnumClass) { + myPhaser = new Phaser(theParticipantCount); + myEnumClass = theEnumClass; + myEnumConstants = myEnumClass.getEnumConstants(); + } + + public E arrive() { + E result = phaseToEnum(myPhaser.arrive()); + ourLog.info("Arrive in phase {}", result); + return result; + } + + public void assertInPhase(E theStageEnum) { + assertEquals(theStageEnum, getPhase(), "In stage " + theStageEnum); + } + + public E getPhase() { + return phaseToEnum(myPhaser.getPhase()); + } + + public E awaitAdvance(E thePhase) { + checkAwait(thePhase); + return doAwait(thePhase); + } + + /** + * Like arrive(), but verify stage first + */ + public E arriveAtMyEndOf(E thePhase) { + assertInPhase(thePhase); + return arrive(); + } + + public E arriveAndAwaitSharedEndOf(E thePhase) { + checkAwait(thePhase); + arrive(); + return doAwait(thePhase); + } + + public E arriveAndDeregister() { + return phaseToEnum(myPhaser.arriveAndDeregister()); + } + + public E register() { + return phaseToEnum(myPhaser.register()); + } + + private E doAwait(E thePhase) { + ourLog.debug("Start doAwait - {}", thePhase); + E phase = phaseToEnum(myPhaser.awaitAdvance(thePhase.ordinal())); + ourLog.info("Finish doAwait - {}", thePhase); + return phase; + } + + private void checkAwait(E thePhase) { + E currentPhase = getPhase(); + if (currentPhase.ordinal() < thePhase.ordinal()) { + fail("Can't wait for end of phase " + thePhase + ", still in phase " + currentPhase); + } else if (currentPhase.ordinal() > thePhase.ordinal()) { + ourLog.warn("Skip waiting for phase {}, already in phase {}", thePhase, currentPhase); + } + } + + + private E phaseToEnum(int resultOrdinal) { + if (resultOrdinal >= myEnumConstants.length) { + throw new IllegalStateException("Enum " + myEnumClass.getName() + " should declare one more enum value for post-completion reporting of phase " + resultOrdinal); + } + return myEnumConstants[resultOrdinal]; + } + + +} diff --git a/hapi-fhir-test-utilities/src/test/java/ca/uhn/test/concurrency/LockstepEnumPhaserTest.java b/hapi-fhir-test-utilities/src/test/java/ca/uhn/test/concurrency/LockstepEnumPhaserTest.java new file mode 100644 index 00000000000..8a4cafce706 --- /dev/null +++ b/hapi-fhir-test-utilities/src/test/java/ca/uhn/test/concurrency/LockstepEnumPhaserTest.java @@ -0,0 +1,190 @@ +package ca.uhn.test.concurrency; + +import com.github.seregamorph.hamcrest.OrderMatchers; +import org.apache.commons.lang3.tuple.Pair; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicInteger; + +import static ca.uhn.test.concurrency.LockstepEnumPhaserTest.Stages.FINISHED; +import static 
ca.uhn.test.concurrency.LockstepEnumPhaserTest.Stages.ONE; +import static ca.uhn.test.concurrency.LockstepEnumPhaserTest.Stages.THREE; +import static ca.uhn.test.concurrency.LockstepEnumPhaserTest.Stages.TWO; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; + +class LockstepEnumPhaserTest { + private static final Logger ourLog = LoggerFactory.getLogger(LockstepEnumPhaserTest.class); + final ExecutorService myExecutorService = Executors.newFixedThreadPool(10); + final List> myProgressEvents = Collections.synchronizedList(new ArrayList<>()); + /** Compare progress records by stage */ + final Comparator> myProgressStageComparator = Comparator.comparing(Pair::getRight); + + enum Stages { + ONE, TWO, THREE, FINISHED + } + + LockstepEnumPhaser myPhaser; + + @Timeout(1) + @Test + void phaserWithOnePariticpant_worksFine() { + // given + myPhaser = new LockstepEnumPhaser<>(1, Stages.class); + + myPhaser.assertInPhase(ONE); + + myPhaser.arriveAtMyEndOf(ONE); + + myPhaser.arriveAndAwaitSharedEndOf(TWO); + + myPhaser.arriveAndAwaitSharedEndOf(THREE); + + myPhaser.assertInPhase(FINISHED); + } + + @Timeout(5) + @Test + void phaserWithTwoThreads_runsInLockStep() throws InterruptedException, ExecutionException { + // given + myPhaser = new LockstepEnumPhaser<>(2, Stages.class); + + // run two copies of the same schedule + AtomicInteger i = new AtomicInteger(0); + Callable schedule = ()->{ + // get unique ids for each thread. + int threadId = i.getAndIncrement(); + + myPhaser.assertInPhase(ONE); + ourLog.info("Starting"); + recordProgress(threadId); + + myPhaser.arriveAndAwaitSharedEndOf(ONE); + + myPhaser.assertInPhase(TWO); + recordProgress(threadId); + + myPhaser.arriveAndAwaitSharedEndOf(TWO); + + myPhaser.assertInPhase(THREE); + recordProgress(threadId); + + Stages nextStage = myPhaser.awaitAdvance(TWO); + assertEquals(THREE, nextStage); + + myPhaser.arriveAtMyEndOf(THREE); + + ourLog.info("Finished"); + + return 1; + }; + Future result1 = myExecutorService.submit(schedule); + Future result2 = myExecutorService.submit(schedule); + + assertEquals(1, result1.get()); + assertEquals(1, result2.get()); + assertThat("progress is ordered", myProgressEvents, OrderMatchers.softOrdered(myProgressStageComparator)); + assertThat("all progress logged", myProgressEvents, Matchers.hasSize(6)); + } + + private void recordProgress(int threadId) { + myProgressEvents.add(Pair.of(threadId, myPhaser.getPhase())); + } + + @Timeout(5) + @Test + void phaserWithTwoThreads_canAddThird_sequencContinues() throws InterruptedException, ExecutionException { + // given + myPhaser = new LockstepEnumPhaser<>(2, Stages.class); + + // run one simple schedule + Callable schedule1 = ()->{ + int threadId = 1; + ourLog.info("Starting schedule1"); + myPhaser.assertInPhase(ONE); + recordProgress(threadId); + + myPhaser.arriveAndAwaitSharedEndOf(ONE); + + recordProgress(threadId); + + myPhaser.arriveAndAwaitSharedEndOf(TWO); + + recordProgress(threadId); + + myPhaser.arriveAndAwaitSharedEndOf(THREE); + + ourLog.info("Finished schedule1"); + + return 1; + }; + // this schedule will start half-way in + Callable schedule2 = ()->{ + int threadId = 2; + ourLog.info("Starting schedule2"); + + myPhaser.assertInPhase(TWO); + recordProgress(threadId); + + myPhaser.arriveAndAwaitSharedEndOf(TWO); + + myPhaser.assertInPhase(THREE); + + recordProgress(threadId); + + myPhaser.arriveAndAwaitSharedEndOf(THREE); + + ourLog.info("Finished schedule2"); + + return 1; + }; + // this schedule will 
start schedule 2 half-way + Callable schedule3 = ()->{ + int threadId = 3; + myPhaser.assertInPhase(ONE); + ourLog.info("Starting schedule3"); + recordProgress(threadId); + + myPhaser.arriveAndAwaitSharedEndOf(ONE); + + recordProgress(threadId); + + // add a new thread to the mix + myPhaser.register(); // tell the phaser to expect one more + myExecutorService.submit(schedule2); + + myPhaser.arriveAndAwaitSharedEndOf(TWO); + + recordProgress(threadId); + + myPhaser.arriveAndAwaitSharedEndOf(THREE); + + ourLog.info("Finished schedule3"); + + return 1; + }; + Future result1 = myExecutorService.submit(schedule1); + Future result2 = myExecutorService.submit(schedule3); + + assertEquals(1, result1.get()); + assertEquals(1, result2.get()); + + assertThat("progress is ordered", myProgressEvents, OrderMatchers.softOrdered(myProgressStageComparator)); + assertThat("all progress logged", myProgressEvents, Matchers.hasSize(8)); + + } + +} From bf495e2d92567f45b536068b49f70a41328bec85 Mon Sep 17 00:00:00 2001 From: James Agnew Date: Mon, 27 Feb 2023 22:19:28 -0500 Subject: [PATCH 4/4] Don't merge yet: MegaScale API Changes 2 (#4544) * Megascale changes 2 * Cleanup * CLeanup * Sync master * Cleanup * Ongoing work * Move service * Fixes * Ongoing megascaler work * Work * Work * Fixes * Work * Test fixes * Work on remote services * Interceptor rework * Work on config refactor * Fixes * Docs tweaks * Address review comments * Test tweak * Test fixes * Try to fix tests * Test tweaks * Fix changelogs * Work on merging * Changes * Megascale fixes * Remove accidental commit * Address fixmes * Cleanup * Test fixes * Compile fix * Test fixes * Account for review comments * Bump HAPI FHIR version --- hapi-deployable-pom/pom.xml | 2 +- hapi-fhir-android/pom.xml | 2 +- hapi-fhir-base/pom.xml | 2 +- .../ca/uhn/fhir/interceptor/api/Pointcut.java | 47 ++- .../interceptor/model/RequestPartitionId.java | 12 +- .../java/ca/uhn/fhir/rest/api/Constants.java | 6 +- hapi-fhir-bom/pom.xml | 4 +- hapi-fhir-checkstyle/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 6 +- hapi-fhir-cli/hapi-fhir-cli-app/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml | 2 +- hapi-fhir-cli/pom.xml | 2 +- hapi-fhir-client-okhttp/pom.xml | 2 +- hapi-fhir-client/pom.xml | 2 +- .../interceptor/LoggingInterceptor.java | 18 +- hapi-fhir-converter/pom.xml | 2 +- hapi-fhir-dist/pom.xml | 2 +- hapi-fhir-docs/pom.xml | 2 +- ...44-externally-stored-resource-jpa-api.yaml | 6 + ...544-hapi-transaction-service-adoption.yaml | 7 + .../fhir/docs/interceptors/interceptors.md | 3 +- hapi-fhir-jacoco/pom.xml | 2 +- hapi-fhir-jaxrsserver-base/pom.xml | 2 +- hapi-fhir-jpa/pom.xml | 2 +- hapi-fhir-jpaserver-base/pom.xml | 2 +- .../ca/uhn/fhir/jpa/config/JpaConfig.java | 14 +- .../ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java | 121 ++++--- .../fhir/jpa/dao/BaseHapiFhirResourceDao.java | 192 +++++++---- .../fhir/jpa/dao/BaseHapiFhirSystemDao.java | 39 ++- .../jpa/dao/JpaStorageResourceParser.java | 23 +- .../dao/expunge/ExpungeEverythingService.java | 169 +++++----- .../fhir/jpa/entity/BulkExportJobEntity.java | 8 +- .../fhir/jpa/entity/BulkImportJobEntity.java | 7 +- .../jpa/entity/ResourceReindexJobEntity.java | 5 +- .../java/ca/uhn/fhir/jpa/entity/Search.java | 31 +- .../esr/ExternallyStoredResourceAddress.java | 52 +++ ...nallyStoredResourceAddressMetadataKey.java | 39 +++ ...ternallyStoredResourceServiceRegistry.java | 69 ++++ .../esr/IExternallyStoredResourceService.java | 41 +++ .../ca/uhn/fhir/jpa/esr/package-info.java | 34 ++ 
.../tasks/HapiFhirJpaMigrationTasks.java | 41 ++- .../provider/JpaConformanceProviderDstu2.java | 2 +- .../search/DatabaseBackedPagingProvider.java | 16 +- .../search/PersistedJpaBundleProvider.java | 64 +++- .../PersistedJpaBundleProviderFactory.java | 16 +- ...istedJpaSearchFirstPageBundleProvider.java | 19 +- .../jpa/search/SearchCoordinatorSvcImpl.java | 119 +++---- .../jpa/search/SynchronousSearchSvcImpl.java | 11 +- .../jpa/search/builder/SearchBuilder.java | 2 +- .../StorageInterceptorHooksFacade.java | 8 +- .../builder/tasks/SearchContinuationTask.java | 9 +- .../jpa/search/builder/tasks/SearchTask.java | 69 ++-- .../cache/DatabaseSearchCacheSvcImpl.java | 1 - .../DatabaseSearchResultCacheSvcImpl.java | 80 +++-- .../search/cache/ISearchResultCacheSvc.java | 12 +- .../jpa/util/JpaHapiTransactionService.java | 45 +++ .../pom.xml | 2 +- hapi-fhir-jpaserver-ips/pom.xml | 2 +- hapi-fhir-jpaserver-mdm/pom.xml | 2 +- .../jpa/mdm/svc/MdmControllerSvcImpl.java | 30 +- .../mdm/provider/MdmProviderMatchR4Test.java | 2 +- .../jpa/mdm/svc/MdmControllerSvcImplTest.java | 16 +- hapi-fhir-jpaserver-model/pom.xml | 2 +- .../jpa/model/config/PartitionSettings.java | 38 ++- .../model/cross/IBasePersistedResource.java | 3 +- .../fhir/jpa/model/cross/IResourceLookup.java | 2 +- .../dialect/HapiSequenceStyleGenerator.java | 8 +- .../model/dialect/ISequenceValueMassager.java | 13 + .../jpa/model/entity/BaseHasResource.java | 9 +- .../entity/PartitionablePartitionId.java | 11 + .../model/entity/ResourceEncodingEnum.java | 11 +- .../model/entity/ResourceHistoryTable.java | 1 + .../fhir/jpa/model/entity/ResourceLink.java | 3 + .../fhir/jpa/model/entity/ResourceTable.java | 12 +- .../jpa/model/entity/StorageSettings.java | 4 + hapi-fhir-jpaserver-searchparam/pom.xml | 2 +- .../model/ReadPartitionIdRequestDetails.java | 72 +++- .../fhir/jpa/cache/IResourceVersionSvc.java | 4 +- .../fhir/jpa/cache/ResourceVersionMap.java | 4 +- .../partition/IRequestPartitionHelperSvc.java | 19 +- .../extractor/BaseSearchParamExtractor.java | 2 +- .../CrossPartitionReferenceDetails.java | 78 +++++ .../SearchParamExtractorService.java | 57 ++-- .../SearchParamExtractorServiceTest.java | 4 +- hapi-fhir-jpaserver-subscription/pom.xml | 2 +- .../SubscriptionTriggeringSvcImpl.java | 18 +- .../matching/DaoSubscriptionMatcherTest.java | 6 + .../module/BaseSubscriptionTest.java | 8 + .../SubscriptionMatchingSubscriberTest.java | 27 +- hapi-fhir-jpaserver-test-dstu2/pom.xml | 2 +- .../FhirResourceDaoDstu2ValidateTest.java | 4 +- .../provider/ResourceProviderDstu2Test.java | 4 +- .../search/SearchCoordinatorSvcImplTest.java | 139 ++++---- .../src/test}/resources/logback-test.xml | 0 hapi-fhir-jpaserver-test-dstu3/pom.xml | 2 +- .../dstu3/ResourceProviderDstu3Test.java | 21 +- ...rceProviderDstu3ValueSetVersionedTest.java | 8 +- .../src/test/resources/logback-test.xml | 46 +++ hapi-fhir-jpaserver-test-r4/pom.xml | 2 +- .../fhir/jpa/batch2/Batch2CoordinatorIT.java | 6 +- .../jpa/bulk/BulkDataExportProviderTest.java | 224 ++++++------- .../jpa/dao/r4/BasePartitioningR4Test.java | 6 +- ...hirResourceDaoR4ExternalReferenceTest.java | 20 +- .../r4/FhirResourceDaoR4SearchNoFtTest.java | 29 +- .../r4/FhirResourceDaoR4TerminologyTest.java | 4 +- .../dao/r4/FhirResourceDaoR4ValidateTest.java | 1 + .../fhir/jpa/dao/r4/FhirSystemDaoR4Test.java | 2 +- .../jpa/dao/r4/PartitioningSqlR4Test.java | 8 +- .../PartitioningInterceptorR4Test.java | 10 +- .../PatientIdPartitionInterceptorTest.java | 4 +- .../jpa/packages/JpaPackageCacheTest.java | 4 +- 
.../ca/uhn/fhir/jpa/packages/NpmR4Test.java | 56 ++-- .../provider/r4/PatientEverythingR4Test.java | 38 ++- .../r4/PatientMemberMatchOperationR4Test.java | 316 +++++++++--------- ...minologyServiceResourceProviderR4Test.java | 4 +- .../r4/ResourceProviderHasParamR4Test.java | 2 +- .../r4/ResourceProviderInterceptorR4Test.java | 19 +- ...ProviderR4ValueSetHSearchDisabledTest.java | 111 +----- ...rceProviderR4ValueSetNoVerCSNoVerTest.java | 4 +- ...ourceProviderR4ValueSetVerCSNoVerTest.java | 4 +- ...esourceProviderR4ValueSetVerCSVerTest.java | 8 +- .../ResourceReindexingSvcImplTest.java | 17 +- .../stresstest/GiantTransactionPerfTest.java | 17 +- .../ca/uhn/fhir/jpa/term/BaseTermR4Test.java | 8 +- .../jpa/term/TerminologySvcImplR4Test.java | 3 + .../src/test/resources/logback-test.xml | 46 +++ hapi-fhir-jpaserver-test-r4b/pom.xml | 2 +- .../src/test/resources/logback-test.xml | 46 +++ hapi-fhir-jpaserver-test-r5/pom.xml | 2 +- .../ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java | 3 + .../dao/r5/CrossPartitionReferencesTest.java | 247 ++++++++++++++ .../r5/ExternallyStoredResourceR5Test.java | 86 +++++ .../r5/ResourceProviderR5ValueSetTest.java | 2 +- ...sourceProviderR5ValueSetVersionedTest.java | 8 +- .../src/test/resources/logback-test.xml | 46 +++ hapi-fhir-jpaserver-test-utilities/pom.xml | 2 +- .../r4/BaseResourceProviderR4Test.java | 3 + .../ca/uhn/fhir/jpa/test/BaseJpaTest.java | 3 +- .../fhir/jpa/test/config/TestR5Config.java | 1 + hapi-fhir-jpaserver-uhnfhirtest/pom.xml | 2 +- hapi-fhir-server-mdm/pom.xml | 2 +- .../mdm/provider/MdmControllerHelper.java | 4 +- hapi-fhir-server-openapi/pom.xml | 2 +- hapi-fhir-server/pom.xml | 2 +- .../util/CompositeInterceptorBroadcaster.java | 1 + .../server/util/ICachedSearchDetails.java | 4 + .../hapi-fhir-caching-api/pom.xml | 2 +- .../hapi-fhir-caching-caffeine/pom.xml | 4 +- .../hapi-fhir-caching-guava/pom.xml | 2 +- .../hapi-fhir-caching-testing/pom.xml | 2 +- hapi-fhir-serviceloaders/pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../hapi-fhir-spring-boot-samples/pom.xml | 2 +- .../hapi-fhir-spring-boot-starter/pom.xml | 2 +- hapi-fhir-spring-boot/pom.xml | 2 +- hapi-fhir-sql-migrate/pom.xml | 2 +- hapi-fhir-storage-batch2-jobs/pom.xml | 2 +- .../DeleteExpungeJobSubmitterImpl.java | 8 +- .../batch2/jobs/reindex/ReindexProvider.java | 5 +- .../jobs/reindex/ReindexProviderTest.java | 2 +- hapi-fhir-storage-batch2/pom.xml | 2 +- hapi-fhir-storage-cr/pom.xml | 2 +- .../src/test/resources/logback-test.xml | 15 + hapi-fhir-storage-mdm/pom.xml | 2 +- hapi-fhir-storage-test-utilities/pom.xml | 2 +- hapi-fhir-storage/pom.xml | 33 +- .../ca/uhn/fhir/jpa/api/IDaoRegistry.java | 2 +- .../fhir/jpa/api/dao/IFhirResourceDao.java | 33 +- .../jpa/api/model/DeleteMethodOutcome.java | 12 +- .../jpa/api/svc/ISearchCoordinatorSvc.java | 2 +- .../jpa/dao/BaseTransactionProcessor.java | 4 +- .../jpa/dao/tx/HapiTransactionService.java | 80 ++++- .../PatientIdPartitionInterceptor.java | 12 +- .../model/search/SearchRuntimeDetails.java | 2 +- .../jpa/model/search/SearchStatusEnum.java | 2 +- .../BaseRequestPartitionHelperSvc.java | 49 ++- .../jpa/util/BaseCaptureQueriesListener.java | 8 +- .../CircularQueueCaptureQueriesListener.java | 6 +- .../CurrentThreadCaptureQueriesListener.java | 2 +- .../java/ca/uhn/fhir/jpa/util/SqlQuery.java | 15 +- .../ca/uhn/fhir/jpa/util/SqlQueryList.java | 2 +- hapi-fhir-structures-dstu2.1/pom.xml | 2 +- hapi-fhir-structures-dstu2/pom.xml | 2 +- hapi-fhir-structures-dstu3/pom.xml | 2 +- 
.../uhn/fhir/rest/server/UpdateDstu3Test.java | 79 ++--- hapi-fhir-structures-hl7org-dstu2/pom.xml | 2 +- hapi-fhir-structures-r4/pom.xml | 2 +- hapi-fhir-structures-r4b/pom.xml | 2 +- hapi-fhir-structures-r5/pom.xml | 2 +- hapi-fhir-test-utilities/pom.xml | 2 +- hapi-fhir-testpage-overlay/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-validation-resources-dstu2/pom.xml | 2 +- hapi-fhir-validation-resources-dstu3/pom.xml | 2 +- hapi-fhir-validation-resources-r4/pom.xml | 2 +- hapi-fhir-validation-resources-r5/pom.xml | 2 +- hapi-fhir-validation/pom.xml | 2 +- hapi-tinder-plugin/pom.xml | 2 +- hapi-tinder-test/pom.xml | 2 +- pom.xml | 8 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- 206 files changed, 2721 insertions(+), 1378 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4544-externally-stored-resource-jpa-api.yaml create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4544-hapi-transaction-service-adoption.yaml create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddress.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/IExternallyStoredResourceService.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java rename {hapi-fhir-base => hapi-fhir-jpaserver-searchparam}/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java (51%) rename {hapi-fhir-storage => hapi-fhir-jpaserver-searchparam}/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java (81%) create mode 100644 hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java rename {hapi-fhir-jpaserver-test-utilities/src/main => hapi-fhir-jpaserver-test-dstu2/src/test}/resources/logback-test.xml (100%) create mode 100644 hapi-fhir-jpaserver-test-dstu3/src/test/resources/logback-test.xml create mode 100644 hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml create mode 100644 hapi-fhir-jpaserver-test-r4b/src/test/resources/logback-test.xml create mode 100644 hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java create mode 100644 hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java create mode 100644 hapi-fhir-jpaserver-test-r5/src/test/resources/logback-test.xml create mode 100644 hapi-fhir-storage-cr/src/test/resources/logback-test.xml rename {hapi-fhir-jpaserver-model => hapi-fhir-storage}/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java (96%) rename {hapi-fhir-jpaserver-model => hapi-fhir-storage}/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java (99%) rename {hapi-fhir-jpaserver-model => hapi-fhir-storage}/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java (98%) rename {hapi-fhir-jpa => hapi-fhir-storage}/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java (93%) rename {hapi-fhir-jpa => hapi-fhir-storage}/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java (98%) rename {hapi-fhir-jpa => 
hapi-fhir-storage}/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java (99%) rename {hapi-fhir-jpa => hapi-fhir-storage}/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java (88%) rename {hapi-fhir-jpa => hapi-fhir-storage}/src/main/java/ca/uhn/fhir/jpa/util/SqlQueryList.java (97%) diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index 23caed22e13..85b3274978a 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index fbdd23757c7..c218688d43e 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index bc4d947753c..8ca362f2370 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java index 70a6c80a4f3..dd92309a50e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java @@ -628,6 +628,13 @@ public enum Pointcut implements IPointcut { * This method is called after all processing is completed for a request, but only if the * request completes normally (i.e. no exception is thrown). *

+ * This pointcut is called after the response has completely finished, meaning that the HTTP respsonse to the client + * may or may not have already completely been returned to the client by the time this pointcut is invoked. Use caution + * if you have timing-dependent logic, since there is no guarantee about whether the client will have already moved on + * by the time your method is invoked. If you need a guarantee that your method is invoked before returning to the + * client, consider using {@link #SERVER_OUTGOING_RESPONSE} instead. + *

+ *

* Hooks may accept the following parameters: *

    *
  • @@ -1245,7 +1252,8 @@ public enum Pointcut implements IPointcut { *
      *
    • * ca.uhn.fhir.rest.server.util.ICachedSearchDetails - Contains the details of the search that - * is being created and initialized + * is being created and initialized. Interceptors may use this parameter to modify aspects of the search + * before it is stored and executed. *
    • *
    • * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the @@ -1264,6 +1272,9 @@ public enum Pointcut implements IPointcut { *
    • * ca.uhn.fhir.jpa.searchparam.SearchParameterMap - Contains the details of the search being checked. This can be modified. *
    • + *
    • + * ca.uhn.fhir.interceptor.model.RequestPartitionId - The partition associated with the request (or {@literal null} if the server is not partitioned) + *
    • *
    *

    * Hooks should return void. @@ -1273,7 +1284,8 @@ public enum Pointcut implements IPointcut { "ca.uhn.fhir.rest.server.util.ICachedSearchDetails", "ca.uhn.fhir.rest.api.server.RequestDetails", "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.searchparam.SearchParameterMap" + "ca.uhn.fhir.jpa.searchparam.SearchParameterMap", + "ca.uhn.fhir.interceptor.model.RequestPartitionId" ), /** @@ -1912,8 +1924,11 @@ public enum Pointcut implements IPointcut { /** * Storage Hook: - * Invoked before FHIR read/access operation (e.g. read/vread, search, history, etc.) operation to request the - * identification of the partition ID to be associated with the resource(s) being searched for, read, etc. + * Invoked before any FHIR read/access/extended operation (e.g. read/vread, search, history, + * $reindex, etc.) operation to request the identification of the partition ID to be associated with + * the resource(s) being searched for, read, etc. Essentially any operations in the JPA server that are not + * creating a resource will use this pointcut. Creates will use {@link #STORAGE_PARTITION_IDENTIFY_CREATE}. + * *

    * This hook will only be called if * partitioning is enabled in the JPA server. @@ -2193,6 +2208,30 @@ public enum Pointcut implements IPointcut { "ca.uhn.fhir.rest.server.TransactionLogMessages", "ca.uhn.fhir.mdm.api.MdmLinkEvent"), + + /** + * JPA Hook: + * This hook is invoked when a cross-partition reference is about to be + * stored in the database. + *

    + * This is an experimental API - It may change in the future, use with caution. + *

    + *

    + * Hooks may accept the following parameters: + *

    + *
      + *
    • + * {@literal ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails} - Contains details about the + * cross partition reference. + *
    • + *
    + *

    + * Hooks should return void. + *
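
Judging from the enum declaration that follows, hooks for this pointcut receive a CrossPartitionReferenceDetails and return a ca.uhn.fhir.jpa.model.cross.IResourceLookup. Since the API is experimental, the following sketch is only a guess at its intended use, and the null return value is an assumption.

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails;

@Interceptor
public class CrossPartitionReferenceInterceptor {

    @Hook(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE)
    public IResourceLookup resolveCrossPartitionReference(CrossPartitionReferenceDetails theDetails) {
        // Inspect theDetails and return a lookup for the referenced resource in its home partition.
        // Whether returning null rejects the reference is not specified in this excerpt.
        return null;
    }
}
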

    + */ + JPA_RESOLVE_CROSS_PARTITION_REFERENCE("ca.uhn.fhir.jpa.model.cross.IResourceLookup", + "ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails"), + /** * Performance Tracing Hook: * This hook is invoked when any informational messages generated by the diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java index 518bec91796..c57720fae7d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java @@ -137,12 +137,12 @@ public class RequestPartitionId implements IModelJson { RequestPartitionId that = (RequestPartitionId) theO; - return new EqualsBuilder() - .append(myAllPartitions, that.myAllPartitions) - .append(myPartitionDate, that.myPartitionDate) - .append(myPartitionIds, that.myPartitionIds) - .append(myPartitionNames, that.myPartitionNames) - .isEquals(); + EqualsBuilder b = new EqualsBuilder(); + b.append(myAllPartitions, that.myAllPartitions); + b.append(myPartitionDate, that.myPartitionDate); + b.append(myPartitionIds, that.myPartitionIds); + b.append(myPartitionNames, that.myPartitionNames); + return b.isEquals(); } @Override diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java index 43a5236dc0f..87755bc6db8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java @@ -307,8 +307,12 @@ public class Constants { public static final String PARAMQUALIFIER_TOKEN_NOT_IN = ":not-in"; public static final String PARAMQUALIFIER_TOKEN_ABOVE = ":above"; public static final String PARAMQUALIFIER_TOKEN_BELOW = ":below"; + /** + * The number of characters in a UUID (36) + */ + public static final int UUID_LENGTH = 36; - static { + static { CHARSET_UTF8 = StandardCharsets.UTF_8; CHARSET_US_ASCII = StandardCharsets.ISO_8859_1; diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index 6aa8dbd53ea..310c30ea189 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -4,14 +4,14 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT pom HAPI FHIR BOM ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml index a1745005096..df2fad7c6a9 100644 --- a/hapi-fhir-checkstyle/pom.xml +++ b/hapi-fhir-checkstyle/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index 3088a7fc8e3..3b1c4e1fc36 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../../hapi-deployable-pom/pom.xml @@ -105,8 +105,8 @@ mariadb-java-client - mysql - mysql-connector-java + com.mysql + mysql-connector-j org.postgresql diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index 580a7014e27..5fa54ba4816 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../pom.xml diff --git 
a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml index b3190b3606a..4ec9b8726c4 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../../hapi-deployable-pom diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index ae1b4991371..d80b4d5b81f 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index c1d8c6f95e6..c187c4b0236 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 364a0b3a561..6f985c5be52 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java index 7443d0f1ca2..8a3c138dcf2 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java @@ -197,43 +197,49 @@ public class LoggingInterceptor implements IClientInterceptor { /** * Should a summary (one line) for each request be logged, containing the URL and other information */ - public void setLogRequestBody(boolean theValue) { + public LoggingInterceptor setLogRequestBody(boolean theValue) { myLogRequestBody = theValue; + return this; } /** * Should headers for each request be logged, containing the URL and other information */ - public void setLogRequestHeaders(boolean theValue) { + public LoggingInterceptor setLogRequestHeaders(boolean theValue) { myLogRequestHeaders = theValue; + return this; } /** * Should a summary (one line) for each request be logged, containing the URL and other information */ - public void setLogRequestSummary(boolean theValue) { + public LoggingInterceptor setLogRequestSummary(boolean theValue) { myLogRequestSummary = theValue; + return this; } /** * Should a summary (one line) for each request be logged, containing the URL and other information */ - public void setLogResponseBody(boolean theValue) { + public LoggingInterceptor setLogResponseBody(boolean theValue) { myLogResponseBody = theValue; + return this; } /** * Should headers for each request be logged, containing the URL and other information */ - public void setLogResponseHeaders(boolean theValue) { + public LoggingInterceptor setLogResponseHeaders(boolean theValue) { myLogResponseHeaders = theValue; + return this; } /** * Should a summary (one line) for each request be logged, containing the URL and other information */ - public void setLogResponseSummary(boolean theValue) { + public LoggingInterceptor setLogResponseSummary(boolean theValue) { myLogResponseSummary = theValue; + return this; } } diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index 168918b5995..23c8387d0de 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir 
hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index 0fd18cb2c49..2ef66951851 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index 665b615d89c..ceb4e65e51c 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4544-externally-stored-resource-jpa-api.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4544-externally-stored-resource-jpa-api.yaml new file mode 100644 index 00000000000..e9f406196ad --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4544-externally-stored-resource-jpa-api.yaml @@ -0,0 +1,6 @@ +--- +type: add +issue: 4544 +title: "A new API has been added to the JPA server for storing resources which + are actually placeholders for externally stored payloads. This API is not yet + used for any published use cases but will be rolled out for future uses." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4544-hapi-transaction-service-adoption.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4544-hapi-transaction-service-adoption.yaml new file mode 100644 index 00000000000..baef88b2b20 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/6_6_0/4544-hapi-transaction-service-adoption.yaml @@ -0,0 +1,7 @@ +--- +type: add +issue: 4544 +title: "The HapiTransactionService (HAPI FHIR JPA Transaction Manager) has been improved + and used in a number of new places (replacing the @Transactional annotation). This + means that stack traces have much less transaction logic noise, and give a clear + indication of transaction boundaries." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/interceptors.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/interceptors.md index c44e517a534..bb56b3be0aa 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/interceptors.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/interceptors/interceptors.md @@ -30,7 +30,8 @@ Creating your own interceptors is easy. Custom interceptor classes do not need t * The method must have an appropriate return value for the chosen [Pointcut](/apidocs/hapi-fhir-base/ca/uhn/fhir/interceptor/api/Pointcut.html). -* The method may have any of the parameters specified for the given [Pointcut](/apidocs/hapi-fhir-base/ca/uhn/fhir/interceptor/api/Pointcut.html). +* The method may have any of the parameters specified for the given [Pointcut](/apidocs/hapi-fhir-base/ca/uhn/fhir/interceptor/api/Pointcut.html). + * A parameter of type [Pointcut](/apidocs/hapi-fhir-base/ca/uhn/fhir/interceptor/api/Pointcut.html) may also be added, and will be passed the actual Pointcut which triggered the invocation. * The method must be public. 
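
The documentation addition above can be exercised with a hook method that declares a Pointcut parameter. A minimal sketch, with an arbitrarily chosen pointcut and illustrative names:

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Interceptor
public class PointcutAwareInterceptor {

    private static final Logger ourLog = LoggerFactory.getLogger(PointcutAwareInterceptor.class);

    @Hook(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED)
    public void onRequest(Pointcut thePointcut, RequestDetails theRequestDetails) {
        // thePointcut receives the pointcut that triggered this invocation, which is mainly
        // useful when the same method body is shared by helpers registered against several pointcuts.
        ourLog.info("Invoked for {} on {}", thePointcut, theRequestDetails.getCompleteUrl());
    }
}
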
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index a93bccae47c..54c7e77e58a 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index f4f804e5825..412531e27e0 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml index b069e2407be..3a0ff147ef3 100644 --- a/hapi-fhir-jpa/pom.xml +++ b/hapi-fhir-jpa/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml 4.0.0 diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 75213a47599..39e3f312049 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java index 43de4a6a32a..c7b4ecb8423 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java @@ -8,6 +8,7 @@ import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.interceptor.executor.InterceptorService; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; @@ -53,6 +54,7 @@ import ca.uhn.fhir.jpa.delete.DeleteConflictService; import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc; import ca.uhn.fhir.jpa.entity.MdmLink; import ca.uhn.fhir.jpa.entity.Search; +import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry; import ca.uhn.fhir.jpa.graphql.DaoRegistryGraphQLStorageServices; import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices; @@ -133,6 +135,7 @@ import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import ca.uhn.fhir.jpa.term.api.ITermReindexingSvc; import ca.uhn.fhir.jpa.term.config.TermCodeSystemConfig; +import ca.uhn.fhir.jpa.util.JpaHapiTransactionService; import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.jpa.validation.ResourceLoaderImpl; import ca.uhn.fhir.jpa.validation.ValidationSettings; @@ -227,6 +230,11 @@ public class JpaConfig { return new CascadingDeleteInterceptor(theFhirContext, theDaoRegistry, theInterceptorBroadcaster, threadSafeResourceDeleterSvc); } + @Bean + public ExternallyStoredResourceServiceRegistry ExternallyStoredResourceServiceRegistry() { + return new ExternallyStoredResourceServiceRegistry(); + } + @Lazy @Bean public ThreadSafeResourceDeleterSvc safeDeleter(DaoRegistry theDaoRegistry, IInterceptorBroadcaster theInterceptorBroadcaster, HapiTransactionService hapiTransactionService) { @@ -383,7 +391,7 @@ public class JpaConfig { @Bean public HapiTransactionService 
hapiTransactionService() { - return new HapiTransactionService(); + return new JpaHapiTransactionService(); } @Bean @@ -524,8 +532,8 @@ public class JpaConfig { @Bean(name = PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER) @Scope("prototype") - public PersistedJpaSearchFirstPageBundleProvider newPersistedJpaSearchFirstPageBundleProvider(RequestDetails theRequest, Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder) { - return new PersistedJpaSearchFirstPageBundleProvider(theSearch, theSearchTask, theSearchBuilder, theRequest); + public PersistedJpaSearchFirstPageBundleProvider newPersistedJpaSearchFirstPageBundleProvider(RequestDetails theRequest, Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) { + return new PersistedJpaSearchFirstPageBundleProvider(theSearch, theSearchTask, theSearchBuilder, theRequest, theRequestPartitionId); } @Bean(name = RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java index c49c0bc4e06..fa36871545d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java @@ -30,6 +30,9 @@ import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.delete.DeleteConflictService; import ca.uhn.fhir.jpa.entity.PartitionEntity; +import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceAddress; +import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceAddressMetadataKey; +import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.jpa.model.cross.IResourceLookup; @@ -221,8 +224,14 @@ public abstract class BaseHapiFhirDao extends BaseStora @Autowired protected InMemoryResourceMatcher myInMemoryResourceMatcher; @Autowired + protected IJpaStorageResourceParser myJpaStorageResourceParser; + @Autowired + protected PartitionSettings myPartitionSettings; + @Autowired ExpungeService myExpungeService; @Autowired + private ExternallyStoredResourceServiceRegistry myExternallyStoredResourceServiceRegistry; + @Autowired private ISearchParamPresenceSvc mySearchParamPresenceSvc; @Autowired private SearchParamWithInlineReferencesExtractor mySearchParamWithInlineReferencesExtractor; @@ -231,18 +240,18 @@ public abstract class BaseHapiFhirDao extends BaseStora private FhirContext myContext; private ApplicationContext myApplicationContext; @Autowired - private PartitionSettings myPartitionSettings; - @Autowired private IPartitionLookupSvc myPartitionLookupSvc; @Autowired private MemoryCacheService myMemoryCacheService; @Autowired(required = false) private IFulltextSearchSvc myFulltextSearchSvc; - @Autowired private PlatformTransactionManager myTransactionManager; - @Autowired - protected IJpaStorageResourceParser myJpaStorageResourceParser; + + @VisibleForTesting + public void setExternallyStoredResourceServiceRegistryForUnitTest(ExternallyStoredResourceServiceRegistry theExternallyStoredResourceServiceRegistry) { + myExternallyStoredResourceServiceRegistry = theExternallyStoredResourceServiceRegistry; + } @VisibleForTesting public void setSearchParamPresenceSvc(ISearchParamPresenceSvc 
theSearchParamPresenceSvc) { @@ -544,42 +553,58 @@ public abstract class BaseHapiFhirDao extends BaseStora if (thePerformIndexing) { - encoding = myStorageSettings.getResourceEncoding(); + ExternallyStoredResourceAddress address = null; + if (myExternallyStoredResourceServiceRegistry.hasProviders()) { + address = ExternallyStoredResourceAddressMetadataKey.INSTANCE.get(theResource); + } - String resourceType = theEntity.getResourceType(); + if (address != null) { - List excludeElements = new ArrayList<>(8); - IBaseMetaType meta = theResource.getMeta(); - - IBaseExtension sourceExtension = getExcludedElements(resourceType, excludeElements, meta); - - theEntity.setFhirVersion(myContext.getVersion().getVersion()); - - HashFunction sha256 = Hashing.sha256(); - HashCode hashCode; - String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext); - if (getStorageSettings().getInlineResourceTextBelowSize() > 0 && encodedResource.length() < getStorageSettings().getInlineResourceTextBelowSize()) { - resourceText = encodedResource; + encoding = ResourceEncodingEnum.ESR; resourceBinary = null; - encoding = ResourceEncodingEnum.JSON; - hashCode = sha256.hashUnencodedChars(encodedResource); - } else { - resourceText = null; - resourceBinary = getResourceBinary(encoding, encodedResource); - hashCode = sha256.hashBytes(resourceBinary); - } - - String hashSha256 = hashCode.toString(); - if (hashSha256.equals(theEntity.getHashSha256()) == false) { + resourceText = address.getProviderId() + ":" + address.getLocation(); changed = true; - } - theEntity.setHashSha256(hashSha256); + + } else { + + encoding = myStorageSettings.getResourceEncoding(); + + String resourceType = theEntity.getResourceType(); + + List excludeElements = new ArrayList<>(8); + IBaseMetaType meta = theResource.getMeta(); + + IBaseExtension sourceExtension = getExcludedElements(resourceType, excludeElements, meta); + + theEntity.setFhirVersion(myContext.getVersion().getVersion()); + + HashFunction sha256 = Hashing.sha256(); + HashCode hashCode; + String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext); + if (myStorageSettings.getInlineResourceTextBelowSize() > 0 && encodedResource.length() < myStorageSettings.getInlineResourceTextBelowSize()) { + resourceText = encodedResource; + resourceBinary = null; + encoding = ResourceEncodingEnum.JSON; + hashCode = sha256.hashUnencodedChars(encodedResource); + } else { + resourceText = null; + resourceBinary = getResourceBinary(encoding, encodedResource); + hashCode = sha256.hashBytes(resourceBinary); + } + + String hashSha256 = hashCode.toString(); + if (hashSha256.equals(theEntity.getHashSha256()) == false) { + changed = true; + } + theEntity.setHashSha256(hashSha256); - if (sourceExtension != null) { - IBaseExtension newSourceExtension = ((IBaseHasExtensions) meta).addExtension(); - newSourceExtension.setUrl(sourceExtension.getUrl()); - newSourceExtension.setValue(sourceExtension.getValue()); + if (sourceExtension != null) { + IBaseExtension newSourceExtension = ((IBaseHasExtensions) meta).addExtension(); + newSourceExtension.setUrl(sourceExtension.getUrl()); + newSourceExtension.setValue(sourceExtension.getValue()); + } + } } else { @@ -662,6 +687,7 @@ public abstract class BaseHapiFhirDao extends BaseStora break; default: case DEL: + case ESR: resourceBinary = new byte[0]; break; } @@ -700,7 +726,7 @@ public abstract class BaseHapiFhirDao extends BaseStora } } boolean allExtensionsRemoved = extensions.isEmpty(); - 
if(allExtensionsRemoved){ + if (allExtensionsRemoved) { hasExtensions = false; } } @@ -866,8 +892,6 @@ public abstract class BaseHapiFhirDao extends BaseStora } - - String toResourceName(IBaseResource theResource) { return myContext.getResourceType(theResource); } @@ -969,9 +993,9 @@ public abstract class BaseHapiFhirDao extends BaseStora List pids = existingParams .getResourceLinks() .stream() - .map(t->t.getId()) + .map(t -> t.getId()) .collect(Collectors.toList()); - new QueryChunker().chunk(pids, t->{ + new QueryChunker().chunk(pids, t -> { List targets = myResourceLinkDao.findByPidAndFetchTargetDetails(t); ourLog.trace("Prefetched targets: {}", targets); }); @@ -1579,6 +1603,14 @@ public abstract class BaseHapiFhirDao extends BaseStora myPartitionSettings = thePartitionSettings; } + /** + * Do not call this method outside of unit tests + */ + @VisibleForTesting + public void setJpaStorageResourceParserForUnitTest(IJpaStorageResourceParser theJpaStorageResourceParser) { + myJpaStorageResourceParser = theJpaStorageResourceParser; + } + private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization { private final TagDefinition myTagDefinition; @@ -1629,6 +1661,7 @@ public abstract class BaseHapiFhirDao extends BaseStora resourceText = GZipUtil.decompress(theResourceBytes); break; case DEL: + case ESR: break; } return resourceText; @@ -1688,12 +1721,4 @@ public abstract class BaseHapiFhirDao extends BaseStora ourValidationDisabledForUnitTest = theValidationDisabledForUnitTest; } - /** - * Do not call this method outside of unit tests - */ - @VisibleForTesting - public void setJpaStorageResourceParserForUnitTest(IJpaStorageResourceParser theJpaStorageResourceParser) { - myJpaStorageResourceParser = theJpaStorageResourceParser; - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java index 0642f9f2158..5f387461ea6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java @@ -50,6 +50,7 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.BaseHasResource; import ca.uhn.fhir.jpa.model.entity.BaseTag; import ca.uhn.fhir.jpa.model.entity.ForcedId; +import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.TagDefinition; @@ -99,6 +100,7 @@ import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import ca.uhn.fhir.util.ObjectUtil; @@ -147,6 +149,7 @@ import java.util.List; import java.util.Optional; import java.util.Set; import java.util.UUID; +import java.util.concurrent.Callable; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -242,7 +245,12 @@ public abstract class BaseHapiFhirResourceDao extends B @Override public DaoMethodOutcome create(T theResource, String 
theIfNoneExist, boolean thePerformIndexing, @Nonnull TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) { - return myTransactionService.execute(theRequestDetails, theTransactionDetails, tx -> doCreateForPost(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails)); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName()); + return myTransactionService + .withRequest(theRequestDetails) + .withTransactionDetails(theTransactionDetails) + .withRequestPartitionId(requestPartitionId) + .execute(tx -> doCreateForPost(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails, requestPartitionId)); } @VisibleForTesting @@ -253,7 +261,7 @@ public abstract class BaseHapiFhirResourceDao extends B /** * Called for FHIR create (POST) operations */ - protected DaoMethodOutcome doCreateForPost(T theResource, String theIfNoneExist, boolean thePerformIndexing, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) { + protected DaoMethodOutcome doCreateForPost(T theResource, String theIfNoneExist, boolean thePerformIndexing, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { if (theResource == null) { String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "missingBody"); throw new InvalidRequestException(Msg.code(956) + msg); @@ -274,13 +282,12 @@ public abstract class BaseHapiFhirResourceDao extends B theResource.setUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED, Boolean.TRUE); } - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName()); - return doCreateForPostOrPut(theRequestDetails, theResource, theIfNoneExist, true, thePerformIndexing, requestPartitionId, RestOperationTypeEnum.CREATE, theTransactionDetails); + return doCreateForPostOrPut(theRequestDetails, theResource, theIfNoneExist, true, thePerformIndexing, theRequestPartitionId, RestOperationTypeEnum.CREATE, theTransactionDetails); } /** - * Called both for FHIR create (POST) operations (via {@link #doCreateForPost(IBaseResource, String, boolean, TransactionDetails, RequestDetails)} - * as well as for FHIR update (PUT) where we're doing a create-with-client-assigned-ID (via {@link #doUpdate(IBaseResource, String, boolean, boolean, RequestDetails, TransactionDetails)}. + * Called both for FHIR create (POST) operations (via {@link #doCreateForPost(IBaseResource, String, boolean, TransactionDetails, RequestDetails, RequestPartitionId)} + * as well as for FHIR update (PUT) where we're doing a create-with-client-assigned-ID (via {@link #doUpdate(IBaseResource, String, boolean, boolean, RequestDetails, TransactionDetails, RequestPartitionId)}. 
*/ private DaoMethodOutcome doCreateForPostOrPut(RequestDetails theRequest, T theResource, String theMatchUrl, boolean theProcessMatchUrl, boolean thePerformIndexing, RequestPartitionId theRequestPartitionId, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) { StopWatch w = new StopWatch(); @@ -290,7 +297,7 @@ public abstract class BaseHapiFhirResourceDao extends B ResourceTable entity = new ResourceTable(); entity.setResourceType(toResourceName(theResource)); - entity.setPartitionId(myRequestPartitionHelperService.toStoragePartition(theRequestPartitionId)); + entity.setPartitionId(PartitionablePartitionId.toStoragePartition(theRequestPartitionId, myPartitionSettings)); entity.setCreatedByMatchUrl(theMatchUrl); entity.setVersion(1); @@ -895,10 +902,16 @@ public abstract class BaseHapiFhirResourceDao extends B } @Override - @Transactional public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) { StopWatch w = new StopWatch(); - IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequestDetails, myResourceName, null, theSince, theUntil, theOffset); + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, null); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details); + IBundleProvider retVal = myTransactionService + .withRequest(theRequestDetails) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + return myPersistedJpaBundleProviderFactory.history(theRequestDetails, myResourceName, null, theSince, theUntil, theOffset, requestPartitionId); + }); ourLog.debug("Processed history on {} in {}ms", myResourceName, w.getMillisAndRestart()); return retVal; } @@ -907,35 +920,48 @@ public abstract class BaseHapiFhirResourceDao extends B * @deprecated Use {@link #history(IIdType, HistorySearchDateRangeParam, RequestDetails)} instead */ @Override - @Transactional public IBundleProvider history(final IIdType theId, final Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequest) { StopWatch w = new StopWatch(); - IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless(); - BaseHasResource entity = readEntity(id, theRequest); + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, theId); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details); + IBundleProvider retVal = myTransactionService + .withRequest(theRequest) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless(); + BaseHasResource entity = readEntity(id, true, theRequest, requestPartitionId); - IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(), theSince, theUntil, theOffset); + return myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(), theSince, theUntil, theOffset, requestPartitionId); + }); - ourLog.debug("Processed history on {} in {}ms", id, w.getMillisAndRestart()); + ourLog.debug("Processed history on {} in {}ms", theId, w.getMillisAndRestart()); return retVal; } @Override - @Transactional public IBundleProvider history(final IIdType theId, final HistorySearchDateRangeParam theHistorySearchDateRangeParam, RequestDetails theRequest) { StopWatch w = new 
StopWatch(); + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, theId); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details); + IBundleProvider retVal = myTransactionService + .withRequest(theRequest) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless(); + BaseHasResource entity = readEntity(id, true, theRequest, requestPartitionId); - IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless(); - BaseHasResource entity = readEntity(id, theRequest); + return myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(), + theHistorySearchDateRangeParam.getLowerBoundAsInstant(), + theHistorySearchDateRangeParam.getUpperBoundAsInstant(), + theHistorySearchDateRangeParam.getOffset(), + theHistorySearchDateRangeParam.getHistorySearchType(), + requestPartitionId + ); + }); - IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(), - theHistorySearchDateRangeParam.getLowerBoundAsInstant(), - theHistorySearchDateRangeParam.getUpperBoundAsInstant(), - theHistorySearchDateRangeParam.getOffset(), - theHistorySearchDateRangeParam.getHistorySearchType()); - - ourLog.debug("Processed history on {} in {}ms", id, w.getMillisAndRestart()); + ourLog.debug("Processed history on {} in {}ms", theId, w.getMillisAndRestart()); return retVal; } @@ -962,8 +988,8 @@ public abstract class BaseHapiFhirResourceDao extends B addAllResourcesTypesToReindex(theBase, theRequestDetails, params); } - ReadPartitionIdRequestDetails details = new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, null, null, null); - RequestPartitionId requestPartition = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null, details); + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_REINDEX); + RequestPartitionId requestPartition = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details); params.setRequestPartitionId(requestPartition); JobInstanceStartRequest request = new JobInstanceStartRequest(); @@ -1160,14 +1186,20 @@ public abstract class BaseHapiFhirResourceDao extends B validateResourceTypeAndThrowInvalidRequestException(theId); TransactionDetails transactionDetails = new TransactionDetails(); - return myTransactionService.execute(theRequest, transactionDetails, tx -> doRead(theId, theRequest, theDeletedOk)); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequest, myResourceName, theId); + + return myTransactionService + .withRequest(theRequest) + .withTransactionDetails(transactionDetails) + .withRequestPartitionId(requestPartitionId) + .execute(() -> doReadInTransaction(theId, theRequest, theDeletedOk, requestPartitionId)); } - public T doRead(IIdType theId, RequestDetails theRequest, boolean theDeletedOk) { + private T doReadInTransaction(IIdType theId, RequestDetails theRequest, boolean theDeletedOk, RequestPartitionId theRequestPartitionId) { assert TransactionSynchronizationManager.isActualTransactionActive(); StopWatch w = new StopWatch(); - BaseHasResource entity = readEntity(theId, theRequest); + BaseHasResource entity = readEntity(theId, true, theRequest, 
theRequestPartitionId); validateResourceType(entity); T retVal = myJpaStorageResourceParser.toResource(myResourceType, entity, null, false); @@ -1214,9 +1246,12 @@ public abstract class BaseHapiFhirResourceDao extends B } @Override - @Transactional public BaseHasResource readEntity(IIdType theId, RequestDetails theRequest) { - return readEntity(theId, true, theRequest); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequest, myResourceName, theId); + return myTransactionService + .withRequest(theRequest) + .withRequestPartitionId(requestPartitionId) + .execute(() -> readEntity(theId, true, theRequest, requestPartitionId)); } @SuppressWarnings("unchecked") @@ -1245,13 +1280,9 @@ public abstract class BaseHapiFhirResourceDao extends B } } - @Override - @Transactional - public BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId, RequestDetails theRequest) { + private BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId, RequestDetails theRequest, RequestPartitionId requestPartitionId) { validateResourceTypeAndThrowInvalidRequestException(theId); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequest, getResourceName(), theId); - BaseHasResource entity; JpaPid pid = myIdHelperService.resolveResourcePersistentIds(requestPartitionId, getResourceName(), theId.getIdPart()); Set readPartitions = null; @@ -1361,15 +1392,16 @@ public abstract class BaseHapiFhirResourceDao extends B } @Override - public void reindex(T theResource, ResourceTable theEntity) { + public void reindex(T theResource, IBasePersistedResource theEntity) { assert TransactionSynchronizationManager.isActualTransactionActive(); - ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getId()); + ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getPersistentId()); if (theResource != null) { CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE); } - TransactionDetails transactionDetails = new TransactionDetails(theEntity.getUpdatedDate()); + ResourceTable entity = (ResourceTable) theEntity; + TransactionDetails transactionDetails = new TransactionDetails(entity.getUpdatedDate()); ResourceTable resourceTable = updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, transactionDetails, true, false); if (theResource != null) { CURRENTLY_REINDEXING.put(theResource, null); @@ -1450,6 +1482,7 @@ public abstract class BaseHapiFhirResourceDao extends B if (retVal instanceof PersistedJpaBundleProvider) { PersistedJpaBundleProvider provider = (PersistedJpaBundleProvider) retVal; + provider.setRequestPartitionId(requestPartitionId); if (provider.getCacheStatus() == SearchCacheStatusEnum.HIT) { if (theServletResponse != null && theRequest != null) { String value = "HIT from " + theRequest.getFhirServerBase(); @@ -1520,33 +1553,37 @@ public abstract class BaseHapiFhirResourceDao extends B @Override public List searchForIds(SearchParameterMap theParams, RequestDetails theRequest, @Nullable IBaseResource theConditionalOperationTargetOrNull) { TransactionDetails transactionDetails = new TransactionDetails(); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequest, myResourceName, theParams, theConditionalOperationTargetOrNull); - return myTransactionService.execute(theRequest, transactionDetails, tx -> { + 
return myTransactionService + .withRequest(theRequest) + .withTransactionDetails(transactionDetails) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { - if (theParams.getLoadSynchronousUpTo() != null) { - theParams.setLoadSynchronousUpTo(Math.min(getStorageSettings().getInternalSynchronousSearchSize(), theParams.getLoadSynchronousUpTo())); - } else { - theParams.setLoadSynchronousUpTo(getStorageSettings().getInternalSynchronousSearchSize()); - } - - ISearchBuilder builder = mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType()); - - List ids = new ArrayList<>(); - - String uuid = UUID.randomUUID().toString(); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequest, getResourceName(), theParams, theConditionalOperationTargetOrNull); - - SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequest, uuid); - try (IResultIterator iter = builder.createQuery(theParams, searchRuntimeDetails, theRequest, requestPartitionId)) { - while (iter.hasNext()) { - ids.add(iter.next()); + if (theParams.getLoadSynchronousUpTo() != null) { + theParams.setLoadSynchronousUpTo(Math.min(myStorageSettings.getInternalSynchronousSearchSize(), theParams.getLoadSynchronousUpTo())); + } else { + theParams.setLoadSynchronousUpTo(myStorageSettings.getInternalSynchronousSearchSize()); } - } catch (IOException e) { - ourLog.error("IO failure during database access", e); - } - return ids; - }); + ISearchBuilder builder = mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType()); + + List ids = new ArrayList<>(); + + String uuid = UUID.randomUUID().toString(); + + SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequest, uuid); + try (IResultIterator iter = builder.createQuery(theParams, searchRuntimeDetails, theRequest, requestPartitionId)) { + while (iter.hasNext()) { + ids.add(iter.next()); + } + } catch (IOException e) { + ourLog.error("IO failure during database access", e); + } + + return ids; + }); } protected MT toMetaDt(Class theType, Collection tagDefinitions) { @@ -1634,15 +1671,25 @@ public abstract class BaseHapiFhirResourceDao extends B String id = theResource.getIdElement().getValue(); Runnable onRollback = () -> theResource.getIdElement().setValue(id); - // Execute the update in a retryable transaction + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName()); + + Callable updateCallback; if (myStorageSettings.isUpdateWithHistoryRewriteEnabled() && theRequest != null && theRequest.isRewriteHistory()) { - return myTransactionService.execute(theRequest, theTransactionDetails, tx -> doUpdateWithHistoryRewrite(theResource, theRequest, theTransactionDetails), onRollback); + updateCallback = () -> doUpdateWithHistoryRewrite(theResource, theRequest, theTransactionDetails, requestPartitionId); } else { - return myTransactionService.execute(theRequest, theTransactionDetails, tx -> doUpdate(theResource, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theRequest, theTransactionDetails), onRollback); + updateCallback = () -> doUpdate(theResource, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theRequest, theTransactionDetails, requestPartitionId); } + + // Execute the update in a retryable transaction + return myTransactionService + .withRequest(theRequest) + .withTransactionDetails(theTransactionDetails) + 
.withRequestPartitionId(requestPartitionId) + .onRollback(onRollback) + .execute(updateCallback); } - private DaoMethodOutcome doUpdate(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequest, TransactionDetails theTransactionDetails) { + private DaoMethodOutcome doUpdate(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequest, TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId) { T resource = theResource; preProcessResourceForStorage(resource); @@ -1662,8 +1709,7 @@ public abstract class BaseHapiFhirResourceDao extends B entity = myEntityManager.find(ResourceTable.class, pid.getId()); resourceId = entity.getIdDt(); } else { - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName()); - DaoMethodOutcome outcome = doCreateForPostOrPut(theRequest, resource, theMatchUrl, false, thePerformIndexing, requestPartitionId, update, theTransactionDetails); + DaoMethodOutcome outcome = doCreateForPostOrPut(theRequest, resource, theMatchUrl, false, thePerformIndexing, theRequestPartitionId, update, theTransactionDetails); // Pre-cache the match URL if (outcome.getPersistentId() != null) { @@ -1682,8 +1728,6 @@ public abstract class BaseHapiFhirResourceDao extends B assert resourceId != null; assert resourceId.hasIdPart(); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName()); - boolean create = false; if (theRequest != null) { @@ -1695,14 +1739,14 @@ public abstract class BaseHapiFhirResourceDao extends B if (!create) { try { - entity = readEntityLatestVersion(resourceId, requestPartitionId, theTransactionDetails); + entity = readEntityLatestVersion(resourceId, theRequestPartitionId, theTransactionDetails); } catch (ResourceNotFoundException e) { create = true; } } if (create) { - return doCreateForPostOrPut(theRequest, resource, null, false, thePerformIndexing, requestPartitionId, update, theTransactionDetails); + return doCreateForPostOrPut(theRequest, resource, null, false, thePerformIndexing, theRequestPartitionId, update, theTransactionDetails); } } @@ -1720,7 +1764,7 @@ public abstract class BaseHapiFhirResourceDao extends B * @param theTransactionDetails details of the transaction * @return the outcome of the operation */ - private DaoMethodOutcome doUpdateWithHistoryRewrite(T theResource, RequestDetails theRequest, TransactionDetails theTransactionDetails) { + private DaoMethodOutcome doUpdateWithHistoryRewrite(T theResource, RequestDetails theRequest, TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId) { StopWatch w = new StopWatch(); // No need for indexing as this will update a non-current version of the resource which will not be searchable @@ -1736,7 +1780,7 @@ public abstract class BaseHapiFhirResourceDao extends B assert resourceId.hasIdPart(); try { - currentEntity = readEntityLatestVersion(resourceId.toVersionless(), theRequest, theTransactionDetails); + currentEntity = readEntityLatestVersion(resourceId.toVersionless(), theRequestPartitionId, theTransactionDetails); if (!resourceId.hasVersionIdPart()) { throw new InvalidRequestException(Msg.code(2093) + "Invalid resource ID, ID must contain a history version"); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java index cfb79a273d2..b844ae87e42 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java @@ -3,6 +3,8 @@ package ca.uhn.fhir.jpa.dao; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; +import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; @@ -10,9 +12,12 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.expunge.ExpungeService; +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory; import ca.uhn.fhir.jpa.search.builder.SearchBuilder; import ca.uhn.fhir.jpa.util.QueryChunker; @@ -88,6 +93,10 @@ public abstract class BaseHapiFhirSystemDao extends B private IResourceTagDao myResourceTagDao; @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired + private IRequestPartitionHelperSvc myRequestPartitionHelperService; + @Autowired + private IHapiTransactionService myTransactionService; @VisibleForTesting public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) { @@ -124,7 +133,6 @@ public abstract class BaseHapiFhirSystemDao extends B return retVal; } - @Transactional(propagation = Propagation.SUPPORTS) @Nullable @Override public Map getResourceCountsFromCache() { @@ -138,32 +146,37 @@ public abstract class BaseHapiFhirSystemDao extends B @Override public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) { StopWatch w = new StopWatch(); - IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequestDetails, null, null, theSince, theUntil, theOffset); + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(null, null); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details); + IBundleProvider retVal = myTransactionService + .withRequest(theRequestDetails) + .withRequestPartitionId(requestPartitionId) + .execute(() -> myPersistedJpaBundleProviderFactory.history(theRequestDetails, null, null, theSince, theUntil, theOffset, requestPartitionId)); ourLog.info("Processed global history in {}ms", w.getMillisAndRestart()); return retVal; } @Override - @Transactional(propagation = Propagation.NEVER) public T transaction(RequestDetails theRequestDetails, T theRequest) { + HapiTransactionService.noTransactionAllowed(); return myTransactionProcessor.transaction(theRequestDetails, theRequest, false); } @Override - @Transactional(propagation = Propagation.MANDATORY) public T transactionNested(RequestDetails theRequestDetails, T theRequest) { + HapiTransactionService.requireTransaction(); return 
myTransactionProcessor.transaction(theRequestDetails, theRequest, true); } @Override - @Transactional(propagation = Propagation.MANDATORY) public

    void preFetchResources(List

    theResolvedIds) { + HapiTransactionService.requireTransaction(); List pids = theResolvedIds .stream() .map(t -> ((JpaPid) t).getId()) .collect(Collectors.toList()); - new QueryChunker().chunk(pids, ids->{ + new QueryChunker().chunk(pids, ids -> { /* * Pre-fetch the resources we're touching in this transaction in mass - this reduced the @@ -182,32 +195,32 @@ public abstract class BaseHapiFhirSystemDao extends B List entityIds; - entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsStringPopulated()).map(t->t.getId()).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsStringPopulated()).map(t -> t.getId()).collect(Collectors.toList()); if (entityIds.size() > 0) { preFetchIndexes(entityIds, "string", "myParamsString", null); } - entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsTokenPopulated()).map(t->t.getId()).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsTokenPopulated()).map(t -> t.getId()).collect(Collectors.toList()); if (entityIds.size() > 0) { preFetchIndexes(entityIds, "token", "myParamsToken", null); } - entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsDatePopulated()).map(t->t.getId()).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsDatePopulated()).map(t -> t.getId()).collect(Collectors.toList()); if (entityIds.size() > 0) { preFetchIndexes(entityIds, "date", "myParamsDate", null); } - entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsQuantityPopulated()).map(t->t.getId()).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsQuantityPopulated()).map(t -> t.getId()).collect(Collectors.toList()); if (entityIds.size() > 0) { preFetchIndexes(entityIds, "quantity", "myParamsQuantity", null); } - entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasLinks()).map(t->t.getId()).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasLinks()).map(t -> t.getId()).collect(Collectors.toList()); if (entityIds.size() > 0) { preFetchIndexes(entityIds, "resourceLinks", "myResourceLinks", null); } - entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasTags()).map(t->t.getId()).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasTags()).map(t -> t.getId()).collect(Collectors.toList()); if (entityIds.size() > 0) { myResourceTagDao.findByResourceIds(entityIds); preFetchIndexes(entityIds, "tags", "myTags", null); @@ -255,7 +268,7 @@ public abstract class BaseHapiFhirSystemDao extends B } private void preFetchIndexes(List theIds, String typeDesc, String fieldName, @Nullable List theEntityListToPopulate) { - new QueryChunker().chunk(theIds, ids->{ + new QueryChunker().chunk(theIds, ids -> { TypedQuery query = myEntityManager.createQuery("FROM ResourceTable r LEFT JOIN FETCH r." 
+ fieldName + " WHERE r.myId IN ( :IDS )", ResourceTable.class); query.setParameter("IDS", ids); List indexFetchOutcome = query.getResultList(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java index 16e524d7ced..6f004aafe94 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java @@ -29,6 +29,8 @@ import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; import ca.uhn.fhir.jpa.entity.PartitionEntity; import ca.uhn.fhir.jpa.entity.ResourceSearchView; +import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry; +import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.jpa.model.entity.BaseTag; @@ -87,6 +89,8 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { private PartitionSettings myPartitionSettings; @Autowired private IPartitionLookupSvc myPartitionLookupSvc; + @Autowired + private ExternallyStoredResourceServiceRegistry myExternallyStoredResourceServiceRegistry; @Override public IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation) { @@ -231,18 +235,29 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { } @SuppressWarnings("unchecked") - private R parseResource(IBaseResourceEntity theEntity, ResourceEncodingEnum resourceEncoding, String decodedResourceText, Class resourceType) { + private R parseResource(IBaseResourceEntity theEntity, ResourceEncodingEnum theResourceEncoding, String theDecodedResourceText, Class theResourceType) { R retVal; - if (resourceEncoding != ResourceEncodingEnum.DEL) { + if (theResourceEncoding == ResourceEncodingEnum.ESR) { + + int colonIndex = theDecodedResourceText.indexOf(':'); + Validate.isTrue(colonIndex > 0, "Invalid ESR address: %s", theDecodedResourceText); + String providerId = theDecodedResourceText.substring(0, colonIndex); + String address = theDecodedResourceText.substring(colonIndex + 1); + Validate.notBlank(providerId, "No provider ID in ESR address: %s", theDecodedResourceText); + Validate.notBlank(address, "No address in ESR address: %s", theDecodedResourceText); + IExternallyStoredResourceService provider = myExternallyStoredResourceServiceRegistry.getProvider(providerId); + retVal = (R) provider.fetchResource(address); + + } else if (theResourceEncoding != ResourceEncodingEnum.DEL) { IParser parser = new TolerantJsonParser(getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getId()); try { - retVal = parser.parseResource(resourceType, decodedResourceText); + retVal = parser.parseResource(theResourceType, theDecodedResourceText); } catch (Exception e) { StringBuilder b = new StringBuilder(); b.append("Failed to parse database resource["); - b.append(myFhirContext.getResourceType(resourceType)); + b.append(myFhirContext.getResourceType(theResourceType)); b.append("/"); b.append(theEntity.getIdDt().getIdPart()); b.append(" (pid "); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java index 5a174360f57..76af53d0f4d 100644 
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java @@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao.expunge; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity; import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity; @@ -69,8 +71,10 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTag; import ca.uhn.fhir.jpa.model.entity.SearchParamPresentEntity; import ca.uhn.fhir.jpa.model.entity.TagDefinition; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import ca.uhn.fhir.util.StopWatch; @@ -96,12 +100,13 @@ public class ExpungeEverythingService implements IExpungeEverythingService { @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; @Autowired + protected IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private HapiTransactionService myTxService; - @Autowired - protected IInterceptorBroadcaster myInterceptorBroadcaster; - @Autowired private MemoryCacheService myMemoryCacheService; + @Autowired + private IRequestPartitionHelperSvc myRequestPartitionHelperSvc; private int deletedResourceEntityCount; @@ -118,67 +123,71 @@ public class ExpungeEverythingService implements IExpungeEverythingService { CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_EXPUNGE_EVERYTHING, hooks); ourLog.info("BEGINNING GLOBAL $expunge"); - myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> { + Propagation propagation = Propagation.REQUIRES_NEW; + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_EXPUNGE); + RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequest, details); + + myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> { counter.addAndGet(doExpungeEverythingQuery("UPDATE " + TermCodeSystem.class.getSimpleName() + " d SET d.myCurrentVersion = null")); }); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2WorkChunkEntity.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2JobInstanceEntity.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionResourceEntity.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionEntity.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageEntity.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchParamPresentEntity.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, 
BulkImportJobFileEntity.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobEntity.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ForcedId.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamDate.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamNumber.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantity.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantityNormalized.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamString.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamToken.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamUri.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamCoords.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboStringUnique.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboTokenNonUnique.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceLink.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchResult.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchInclude.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConceptDesignation.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConcept.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSet.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptParentChildLink.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElementTarget.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElement.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroup.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMap.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptProperty.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptDesignation.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConcept.class)); - myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> { + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2WorkChunkEntity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2JobInstanceEntity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionResourceEntity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionEntity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageEntity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchParamPresentEntity.class, 
requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobFileEntity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobEntity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ForcedId.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamDate.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamNumber.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantityNormalized.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamString.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamToken.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamUri.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamCoords.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboStringUnique.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboTokenNonUnique.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceLink.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchResult.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchInclude.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConceptDesignation.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConcept.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSet.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptParentChildLink.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElementTarget.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElement.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroup.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMap.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptProperty.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptDesignation.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConcept.class, requestPartitionId)); + myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> { for (TermCodeSystem next : 
myEntityManager.createQuery("SELECT c FROM " + TermCodeSystem.class.getName() + " c", TermCodeSystem.class).getResultList()) { next.setCurrentVersion(null); myEntityManager.merge(next); } }); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystemVersion.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystem.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SubscriptionTable.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTag.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTag.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TagDefinition.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryProvenanceEntity.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTable.class)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystemVersion.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystem.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SubscriptionTable.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTag.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTag.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TagDefinition.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryProvenanceEntity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTable.class, requestPartitionId)); int counterBefore = counter.get(); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTable.class)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, PartitionEntity.class)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTable.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, PartitionEntity.class, requestPartitionId)); deletedResourceEntityCount = counter.get() - counterBefore; - myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> { + myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> { counter.addAndGet(doExpungeEverythingQuery("DELETE from " + Search.class.getSimpleName() + " d")); }); - purgeAllCaches(); + purgeAllCaches(); ourLog.info("COMPLETED GLOBAL $expunge - Deleted {} rows", counter.get()); } @@ -189,42 +198,42 @@ public class ExpungeEverythingService implements IExpungeEverythingService { } private void purgeAllCaches() { - myMemoryCacheService.invalidateAllCaches(); - } + myMemoryCacheService.invalidateAllCaches(); + } - private int expungeEverythingByTypeWithoutPurging(RequestDetails theRequest, Class theEntityType) { - int outcome = 0; - while (true) { - StopWatch sw = new StopWatch(); + private int expungeEverythingByTypeWithoutPurging(RequestDetails theRequest, Class theEntityType, RequestPartitionId theRequestPartitionId) { + int outcome = 0; + while (true) { + StopWatch sw = new StopWatch(); - int count = 
myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> { - CriteriaBuilder cb = myEntityManager.getCriteriaBuilder(); - CriteriaQuery cq = cb.createQuery(theEntityType); - cq.from(theEntityType); - TypedQuery query = myEntityManager.createQuery(cq); - query.setMaxResults(1000); - List results = query.getResultList(); - for (Object result : results) { - myEntityManager.remove(result); - } - return results.size(); - }); + int count = myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).withRequestPartitionId(theRequestPartitionId).execute(() -> { + CriteriaBuilder cb = myEntityManager.getCriteriaBuilder(); + CriteriaQuery cq = cb.createQuery(theEntityType); + cq.from(theEntityType); + TypedQuery query = myEntityManager.createQuery(cq); + query.setMaxResults(1000); + List results = query.getResultList(); + for (Object result : results) { + myEntityManager.remove(result); + } + return results.size(); + }); - outcome += count; - if (count == 0) { - break; - } + outcome += count; + if (count == 0) { + break; + } - ourLog.info("Have deleted {} entities of type {} in {}", outcome, theEntityType.getSimpleName(), sw.toString()); - } - return outcome; - } + ourLog.info("Have deleted {} entities of type {} in {}", outcome, theEntityType.getSimpleName(), sw); + } + return outcome; + } @Override public int expungeEverythingByType(Class theEntityType) { - int result = expungeEverythingByTypeWithoutPurging(null, theEntityType); - purgeAllCaches(); - return result; + int result = expungeEverythingByTypeWithoutPurging(null, theEntityType, RequestPartitionId.allPartitions()); + purgeAllCaches(); + return result; } @Override @@ -235,7 +244,7 @@ public class ExpungeEverythingService implements IExpungeEverythingService { private int doExpungeEverythingQuery(String theQuery) { StopWatch sw = new StopWatch(); int outcome = myEntityManager.createQuery(theQuery).executeUpdate(); - ourLog.debug("SqlQuery affected {} rows in {}: {}", outcome, sw.toString(), theQuery); + ourLog.debug("SqlQuery affected {} rows in {}: {}", outcome, sw, theQuery); return outcome; } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java index 07d44f949eb..050860b47da 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.entity; */ import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; +import ca.uhn.fhir.rest.api.Constants; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.r5.model.InstantType; @@ -46,6 +47,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Date; +import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.left; @@ -58,7 +60,7 @@ import static org.apache.commons.lang3.StringUtils.left; * See the BulkExportAppCtx for job details */ @Entity -@Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = { +@Table(name = BulkExportJobEntity.HFJ_BLK_EXPORT_JOB, uniqueConstraints = { @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID") }, indexes = { @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME") @@ 
-68,13 +70,15 @@ public class BulkExportJobEntity implements Serializable { public static final int REQUEST_LENGTH = 1024; public static final int STATUS_MESSAGE_LEN = 500; + public static final String JOB_ID = "JOB_ID"; + public static final String HFJ_BLK_EXPORT_JOB = "HFJ_BLK_EXPORT_JOB"; @Id @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXJOB_PID") @SequenceGenerator(name = "SEQ_BLKEXJOB_PID", sequenceName = "SEQ_BLKEXJOB_PID") @Column(name = "PID") private Long myId; - @Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false) + @Column(name = JOB_ID, length = UUID_LENGTH, nullable = false) private String myJobId; @Enumerated(EnumType.STRING) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java index 5c4195b5423..27d75dbedc9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java @@ -40,21 +40,24 @@ import javax.persistence.Version; import java.io.Serializable; import java.util.Date; +import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH; import static org.apache.commons.lang3.StringUtils.left; @Entity -@Table(name = "HFJ_BLK_IMPORT_JOB", uniqueConstraints = { +@Table(name = BulkImportJobEntity.HFJ_BLK_IMPORT_JOB, uniqueConstraints = { @UniqueConstraint(name = "IDX_BLKIM_JOB_ID", columnNames = "JOB_ID") }) public class BulkImportJobEntity implements Serializable { + public static final String HFJ_BLK_IMPORT_JOB = "HFJ_BLK_IMPORT_JOB"; + public static final String JOB_ID = "JOB_ID"; @Id @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOB_PID") @SequenceGenerator(name = "SEQ_BLKIMJOB_PID", sequenceName = "SEQ_BLKIMJOB_PID") @Column(name = "PID") private Long myId; - @Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false, updatable = false) + @Column(name = JOB_ID, length = UUID_LENGTH, nullable = false, updatable = false) private String myJobId; @Column(name = "JOB_DESC", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN) private String myJobDescription; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java index f49e3d10b11..b76e632a75d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java @@ -24,6 +24,7 @@ import ca.uhn.fhir.rest.api.Constants; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import org.hl7.fhir.r4.model.InstantType; import javax.persistence.Column; import javax.persistence.Entity; @@ -149,8 +150,8 @@ public class ResourceReindexJobEntity implements Serializable { ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) .append("id", myId) .append("resourceType", myResourceType) - .append("thresholdLow", myThresholdLow) - .append("thresholdHigh", myThresholdHigh); + .append("thresholdLow", new InstantType(myThresholdLow)) + .append("thresholdHigh", new InstantType(myThresholdHigh)); if (myDeleted) { b.append("deleted", myDeleted); } diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java index 6170cdc8ffc..d656df3778d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java @@ -76,13 +76,18 @@ import static org.apache.commons.lang3.StringUtils.left; }) public class Search implements ICachedSearchDetails, Serializable { + /** + * Long enough to accommodate a full UUID (36) with an additional prefix + * used by megascale (12) + */ @SuppressWarnings("WeakerAccess") - public static final int UUID_COLUMN_LENGTH = 36; + public static final int SEARCH_UUID_COLUMN_LENGTH = 48; public static final String HFJ_SEARCH = "HFJ_SEARCH"; private static final int MAX_SEARCH_QUERY_STRING = 10000; private static final int FAILURE_MESSAGE_LENGTH = 500; private static final long serialVersionUID = 1L; private static final Logger ourLog = LoggerFactory.getLogger(Search.class); + public static final String SEARCH_UUID = "SEARCH_UUID"; @Temporal(TemporalType.TIMESTAMP) @Column(name = "CREATED", nullable = false, updatable = false) private Date myCreated; @@ -136,7 +141,7 @@ public class Search implements ICachedSearchDetails, Serializable { private SearchStatusEnum myStatus; @Column(name = "TOTAL_COUNT", nullable = true) private Integer myTotalCount; - @Column(name = "SEARCH_UUID", length = UUID_COLUMN_LENGTH, nullable = false, updatable = false) + @Column(name = SEARCH_UUID, length = SEARCH_UUID_COLUMN_LENGTH, nullable = false, updatable = false) private String myUuid; @SuppressWarnings("unused") @Version @@ -146,6 +151,9 @@ public class Search implements ICachedSearchDetails, Serializable { @Column(name = "SEARCH_PARAM_MAP", nullable = true) private byte[] mySearchParameterMap; + @Transient + private transient SearchParameterMap mySearchParameterMapTransient; + /** * This isn't currently persisted in the DB as it's only used for offset mode. We could * change this if needed in the future. @@ -363,10 +371,12 @@ public class Search implements ICachedSearchDetails, Serializable { myTotalCount = theTotalCount; } + @Override public String getUuid() { return myUuid; } + @Override public void setUuid(String theUuid) { myUuid = theUuid; } @@ -402,11 +412,26 @@ public class Search implements ICachedSearchDetails, Serializable { return myVersion; } + /** + * Note that this is not always set! We set this if we're storing a + * Search in {@link SearchStatusEnum#PASSCMPLET} status since we'll need + * the map in order to restart, but otherwise we save space and time by + * not storing it. 
+ */ public Optional getSearchParameterMap() { - return Optional.ofNullable(mySearchParameterMap).map(t -> SerializationUtils.deserialize(mySearchParameterMap)); + if (mySearchParameterMapTransient != null) { + return Optional.of(mySearchParameterMapTransient); + } + SearchParameterMap searchParameterMap = null; + if (mySearchParameterMap != null) { + searchParameterMap = SerializationUtils.deserialize(mySearchParameterMap); + mySearchParameterMapTransient = searchParameterMap; + } + return Optional.ofNullable(searchParameterMap); } public void setSearchParameterMap(SearchParameterMap theSearchParameterMap) { + mySearchParameterMapTransient = theSearchParameterMap; mySearchParameterMap = SerializationUtils.serialize(theSearchParameterMap); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddress.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddress.java new file mode 100644 index 00000000000..6c2abc8601c --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddress.java @@ -0,0 +1,52 @@ +package ca.uhn.fhir.jpa.esr; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +public class ExternallyStoredResourceAddress { + + private final String myProviderId; + private final String myLocation; + + /** + * Constructor + * + * @param theProviderId The ID of the provider which will handle this address. Must match the ID returned by a registered {@link IExternallyStoredResourceService}. + * @param theLocation The actual location for the provider to resolve. The format of this string is entirely up to the {@link IExternallyStoredResourceService} and only needs to make sense to it. + */ + public ExternallyStoredResourceAddress(String theProviderId, String theLocation) { + myProviderId = theProviderId; + myLocation = theLocation; + } + + /** + * @return The ID of the provider which will handle this address. Must match the ID returned by a registered {@link IExternallyStoredResourceService}. + */ + public String getProviderId() { + return myProviderId; + } + + /** + * @return The actual location for the provider to resolve. The format of this string is entirely up to the {@link IExternallyStoredResourceService} and only needs to make sense to it. + */ + public String getLocation() { + return myLocation; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java new file mode 100644 index 00000000000..dfe1e1a61b1 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java @@ -0,0 +1,39 @@ +package ca.uhn.fhir.jpa.esr; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. 
+ * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; + +public class ExternallyStoredResourceAddressMetadataKey extends ResourceMetadataKeyEnum { + + /** + * Singleton instance + */ + public static final ExternallyStoredResourceAddressMetadataKey INSTANCE = new ExternallyStoredResourceAddressMetadataKey(); + + /** + * Constructor + */ + private ExternallyStoredResourceAddressMetadataKey() { + super("ExternallyStoredResourceAddress", ExternallyStoredResourceAddress.class); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java new file mode 100644 index 00000000000..1a89700110f --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java @@ -0,0 +1,69 @@ +package ca.uhn.fhir.jpa.esr; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.apache.commons.lang3.Validate; + +import javax.annotation.Nonnull; +import java.util.HashMap; +import java.util.Map; + +import static org.apache.commons.lang3.StringUtils.defaultString; + +public class ExternallyStoredResourceServiceRegistry { + + private static final String VALID_ID_PATTERN = "[a-zA-Z0-9_.-]+"; + private final Map myIdToProvider = new HashMap<>(); + + /** + * Registers a new provider. Do not call this method after the server has been started. + * + * @param theProvider The provider to register. + */ + public void registerProvider(@Nonnull IExternallyStoredResourceService theProvider) { + String id = defaultString(theProvider.getId()); + Validate.isTrue(id.matches(VALID_ID_PATTERN), "Invalid provider ID (must match pattern " + VALID_ID_PATTERN + "): %s", id); + Validate.isTrue(!myIdToProvider.containsKey(id), "Already have a provider with ID: %s", id); + + myIdToProvider.put(id, theProvider); + } + + /** + * Do we have any providers registered? + */ + public boolean hasProviders() { + return !myIdToProvider.isEmpty(); + } + + /** + * Clears all registered providers. This method is mostly intended for unit tests. 
+ */ + public void clearProviders() { + myIdToProvider.clear(); + } + + @Nonnull + public IExternallyStoredResourceService getProvider(@Nonnull String theProviderId) { + IExternallyStoredResourceService retVal = myIdToProvider.get(theProviderId); + Validate.notNull(retVal, "Invalid ESR provider ID: %s", theProviderId); + return retVal; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/IExternallyStoredResourceService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/IExternallyStoredResourceService.java new file mode 100644 index 00000000000..da8b9c43097 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/IExternallyStoredResourceService.java @@ -0,0 +1,41 @@ +package ca.uhn.fhir.jpa.esr; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.hl7.fhir.instance.model.api.IBaseResource; + +public interface IExternallyStoredResourceService { + + /** + * Returns the ID of this provider. No two providers may return the same + * ID, and this provider should always return the same ID. + */ + String getId(); + + /** + * Fetches the given resource using the given address string + * + * @param theAddress The address string is a format that is entirely up to the individual provider. HAPI FHIR + * doesn't try to understand it. + * @return HAPI FHIR may modify the returned object, so it is important to always return a new object for every call here (careful with caching!) + */ + IBaseResource fetchResource(String theAddress); +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java new file mode 100644 index 00000000000..8e0c2b9f59a --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java @@ -0,0 +1,34 @@ + +/** + * This package stores the mechanism for Externally Stored Resources (ESRs). + *
    + * A normal resource stores its contents in the {@link ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable} + * entity as text (compressed JSON or otherwise) with one row in that table per version of the + * resource. An ESR still creates a row in that table, but stores a reference to actual body + * storage instead. + *
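A minimal illustrative sketch (not part of the patch itself) of how a provider might plug into this new ESR extension point, assuming an invented DemoFileResourceService class, a made-up provider id "demo-files", and a made-up file-path address; it relies only on the IExternallyStoredResourceService and ExternallyStoredResourceServiceRegistry APIs introduced above, and in a real server the registry would be the Spring-managed bean rather than a locally constructed instance.

import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Patient;

public class DemoFileResourceService implements IExternallyStoredResourceService {

    @Override
    public String getId() {
        // Hypothetical provider id; must be stable, unique, and match the registry's [a-zA-Z0-9_.-]+ pattern
        return "demo-files";
    }

    @Override
    public IBaseResource fetchResource(String theAddress) {
        // theAddress is whatever this provider previously chose to store (imagined here as a file path).
        // A real provider would load and parse that location; this sketch just fabricates a resource.
        // Always return a fresh instance, since HAPI FHIR may modify the returned object.
        Patient patient = new Patient();
        patient.setId("Patient/example");
        return patient;
    }

    public static void main(String[] args) {
        // Register the provider once at startup, before the server begins serving requests
        ExternallyStoredResourceServiceRegistry registry = new ExternallyStoredResourceServiceRegistry();
        registry.registerProvider(new DemoFileResourceService());

        // A history row stored with encoding ESR and the text "demo-files:/tmp/patient.json" would be
        // resolved by splitting at the first ':' into provider id and address, as in the
        // JpaStorageResourceParser change earlier in this patch.
        IBaseResource fetched = registry.getProvider("demo-files").fetchResource("/tmp/patient.json");
        System.out.println(fetched.getIdElement().getValue());
    }
}

The "demo-files:" prefix and the file-path address are assumptions for the sketch; the only requirement visible in this patch is that the stored text contains the provider id, a colon, and whatever address string the provider itself knows how to resolve.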
    + * THIS IS AN EXPERIMENTAL API - IT MAY GO AWAY OR CHANGE IN THE FUTURE + * + * @since 6.6.0 + */ +package ca.uhn.fhir.jpa.esr; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index bf682ab110f..be042d64c3c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.migrate.tasks; */ import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; +import ca.uhn.fhir.jpa.entity.BulkImportJobEntity; import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask; @@ -31,13 +33,13 @@ import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks; import ca.uhn.fhir.jpa.migrate.tasks.api.Builder; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; -import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri; import ca.uhn.fhir.jpa.model.entity.SearchParamPresentEntity; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.util.VersionEnum; import java.util.Arrays; @@ -48,6 +50,8 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH; + @SuppressWarnings({"SqlNoDataSourceInspection", "SpellCheckingInspection"}) public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { @@ -86,15 +90,13 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { init600(); // 20211102 - init610(); init620(); - init630(); init640(); init660(); } protected void init660() { - - Builder version = forVersion(VersionEnum.V6_6_0); + // fix Postgres clob types - that stupid oid driver problem is still there // BT2_JOB_INSTANCE.PARAMS_JSON_LOB version.onTable("BT2_JOB_INSTANCE") @@ -105,12 +107,25 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // BT2_WORK_CHUNK.CHUNK_DATA version.onTable("BT2_WORK_CHUNK") .migratePostgresTextClobToBinaryClob("20230208.3", "CHUNK_DATA"); + + version + .onTable(Search.HFJ_SEARCH) + .addColumn("20230215.1", Search.SEARCH_UUID) + .nullable() + .type(ColumnTypeEnum.STRING, 
Search.SEARCH_UUID_COLUMN_LENGTH); + version + .onTable(BulkImportJobEntity.HFJ_BLK_IMPORT_JOB) + .addColumn("20230215.2", BulkImportJobEntity.JOB_ID) + .nullable() + .type(ColumnTypeEnum.STRING, UUID_LENGTH); + version + .onTable(BulkExportJobEntity.HFJ_BLK_EXPORT_JOB) + .addColumn("20230215.3", BulkExportJobEntity.JOB_ID) + .nullable() + .type(ColumnTypeEnum.STRING, UUID_LENGTH); } + protected void init640() { - - } - - protected void init630() { Builder version = forVersion(VersionEnum.V6_3_0); // start forced_id inline migration @@ -129,7 +144,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { .online(true) .withColumns("SEARCH_PID") .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); -; + ; } @@ -474,10 +489,10 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // Fix for https://github.com/hapifhir/hapi-fhir-jpaserver-starter/issues/328 version.onTable("NPM_PACKAGE_VER") - .modifyColumn("20220501.1","FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20); + .modifyColumn("20220501.1", "FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20); version.onTable("NPM_PACKAGE_VER_RES") - .modifyColumn("20220501.2","FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20); + .modifyColumn("20220501.2", "FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20); // Fix for https://gitlab.com/simpatico.ai/cdr/-/issues/3166 version.onTable("MPI_LINK") @@ -822,7 +837,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // Bulk Import Job Builder.BuilderAddTableByColumns blkImportJobTable = version.addTableByColumns("20210410.1", "HFJ_BLK_IMPORT_JOB", "PID"); blkImportJobTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); - blkImportJobTable.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, Search.UUID_COLUMN_LENGTH); + blkImportJobTable.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, UUID_LENGTH); blkImportJobTable.addColumn("JOB_STATUS").nonNullable().type(ColumnTypeEnum.STRING, 10); blkImportJobTable.addColumn("STATUS_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); blkImportJobTable.addColumn("STATUS_MESSAGE").nullable().type(ColumnTypeEnum.STRING, 500); @@ -1500,7 +1515,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { version.onTable("HFJ_SEARCH") .addColumn("20190814.6", "SEARCH_PARAM_MAP").nullable().type(ColumnTypeEnum.BLOB); version.onTable("HFJ_SEARCH") - .modifyColumn("20190814.7", "SEARCH_UUID").nonNullable().withType(ColumnTypeEnum.STRING, 36); + .modifyColumn("20190814.7", "SEARCH_UUID").nonNullable().withType(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH); version.onTable("HFJ_SEARCH_PARM").dropThisTable("20190814.8"); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java index 3bcb29103a8..fb000b232a0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java @@ -69,7 +69,7 @@ public class JpaConformanceProviderDstu2 extends ServerConformanceProvider { * Constructor */ @CoverageIgnore - public JpaConformanceProviderDstu2(){ + public JpaConformanceProviderDstu2() { super(); super.setCache(false); setIncludeResourceCounts(true); diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java index c86aa3b83bd..db9baad7b3d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java @@ -28,6 +28,8 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.BasePagingProvider; import org.springframework.beans.factory.annotation.Autowired; +import javax.annotation.Nullable; + // Note: this class is not annotated with @Service because we want to // explicitly define it in BaseConfig.java. This is done so that // implementors can override if they want to. @@ -51,6 +53,7 @@ public class DatabaseBackedPagingProvider extends BasePagingProvider { /** * Constructor + * * @deprecated Use {@link DatabaseBackedPagingProvider} as this constructor has no purpose */ @Deprecated @@ -60,12 +63,19 @@ public class DatabaseBackedPagingProvider extends BasePagingProvider { @Override public synchronized IBundleProvider retrieveResultList(RequestDetails theRequestDetails, String theId) { - myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, "Bundle", null, null); PersistedJpaBundleProvider provider = myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, theId); - if (!provider.ensureSearchEntityLoaded()) { + return validateAndReturnBundleProvider(provider); + } + + /** + * Subclasses may override in order to modify the bundle provider being returned + */ + @Nullable + protected PersistedJpaBundleProvider validateAndReturnBundleProvider(PersistedJpaBundleProvider theBundleProvider) { + if (!theBundleProvider.ensureSearchEntityLoaded()) { return null; } - return provider; + return theBundleProvider; } @Override diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java index dbc45228ccb..54d53fefd84 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java @@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; @@ -40,9 +41,10 @@ import ca.uhn.fhir.jpa.entity.SearchTypeEnum; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.BaseHasResource; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; -import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc; import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.model.primitive.InstantDt; @@ -98,7 +100,7 @@ public class PersistedJpaBundleProvider implements 
IBundleProvider { @Autowired private ISearchCacheSvc mySearchCacheSvc; @Autowired - private RequestPartitionHelperSvc myRequestPartitionHelperSvc; + private IRequestPartitionHelperSvc myRequestPartitionHelperSvc; @Autowired private JpaStorageSettings myStorageSettings; @Autowired @@ -130,6 +132,11 @@ public class PersistedJpaBundleProvider implements IBundleProvider { myUuid = theSearch.getUuid(); } + @VisibleForTesting + public void setRequestPartitionHelperSvcForUnitTest(IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { + myRequestPartitionHelperSvc = theRequestPartitionHelperSvc; + } + protected Search getSearchEntity() { return mySearchEntity; } @@ -148,7 +155,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider { HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh()); - RequestPartitionId partitionId = getRequestPartitionIdForHistory(); + RequestPartitionId partitionId = getRequestPartitionId(); List results = historyBuilder.fetchEntities(partitionId, theOffset, theFromIndex, theToIndex, mySearchEntity.getHistorySearchStyle()); @@ -194,18 +201,26 @@ public class PersistedJpaBundleProvider implements IBundleProvider { } @Nonnull - private RequestPartitionId getRequestPartitionIdForHistory() { + protected final RequestPartitionId getRequestPartitionId() { if (myRequestPartitionId == null) { - if (mySearchEntity.getResourceId() != null) { - // If we have an ID, we've already checked the partition and made sure it's appropriate - myRequestPartitionId = RequestPartitionId.allPartitions(); + ReadPartitionIdRequestDetails details; + if (mySearchEntity == null) { + details = ReadPartitionIdRequestDetails.forSearchUuid(myUuid); + } else if (mySearchEntity.getSearchType() == SearchTypeEnum.HISTORY) { + details = ReadPartitionIdRequestDetails.forHistory(mySearchEntity.getResourceType(), null); } else { - myRequestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(myRequest, mySearchEntity.getResourceType(), null); + SearchParameterMap params = mySearchEntity.getSearchParameterMap().orElse(null); + details = ReadPartitionIdRequestDetails.forSearchType(mySearchEntity.getResourceType(), params, null); } + myRequestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(myRequest, details); } return myRequestPartitionId; } + public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) { + myRequestPartitionId = theRequestPartitionId; + } + protected List doSearchOrEverything(final int theFromIndex, final int theToIndex) { if (mySearchEntity.getTotalCount() != null && mySearchEntity.getNumFound() <= 0) { // No resources to fetch (e.g. 
we did a _summary=count search) @@ -217,8 +232,14 @@ public class PersistedJpaBundleProvider implements IBundleProvider { final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, resourceName, resourceType); - final List pidsSubList = mySearchCoordinatorSvc.getResources(myUuid, theFromIndex, theToIndex, myRequest); - return myTxService.withRequest(myRequest).execute(() -> toResourceList(sb, pidsSubList)); + RequestPartitionId requestPartitionId = getRequestPartitionId(); + final List pidsSubList = mySearchCoordinatorSvc.getResources(myUuid, theFromIndex, theToIndex, myRequest, requestPartitionId); + return myTxService + .withRequest(myRequest) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + return toResourceList(sb, pidsSubList); + }); } /** @@ -226,7 +247,10 @@ public class PersistedJpaBundleProvider implements IBundleProvider { */ public boolean ensureSearchEntityLoaded() { if (mySearchEntity == null) { - Optional searchOpt = myTxService.withRequest(myRequest).execute(() -> mySearchCacheSvc.fetchByUuid(myUuid)); + Optional searchOpt = myTxService + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .execute(() -> mySearchCacheSvc.fetchByUuid(myUuid)); if (!searchOpt.isPresent()) { return false; } @@ -263,11 +287,14 @@ public class PersistedJpaBundleProvider implements IBundleProvider { key = MemoryCacheService.HistoryCountKey.forSystem(); } - Function supplier = k -> myTxService.withRequest(myRequest).execute(() -> { - HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh()); - Long count = historyBuilder.fetchCount(getRequestPartitionIdForHistory()); - return count.intValue(); - }); + Function supplier = k -> myTxService + .withRequest(myRequest) + .withRequestPartitionId(getRequestPartitionId()) + .execute(() -> { + HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh()); + Long count = historyBuilder.fetchCount(getRequestPartitionId()); + return count.intValue(); + }); boolean haveOffset = mySearchEntity.getLastUpdatedLow() != null || mySearchEntity.getLastUpdatedHigh() != null; @@ -307,7 +334,10 @@ public class PersistedJpaBundleProvider implements IBundleProvider { switch (mySearchEntity.getSearchType()) { case HISTORY: - return myTxService.withRequest(myRequest).execute(() -> doHistoryInTransaction(mySearchEntity.getOffset(), theFromIndex, theToIndex)); + return myTxService + .withRequest(myRequest) + .withRequestPartitionId(getRequestPartitionId()) + .execute(() -> doHistoryInTransaction(mySearchEntity.getOffset(), theFromIndex, theToIndex)); case SEARCH: case EVERYTHING: default: diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java index 855f7dec8c2..c14aa477a74 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.search; * #L% */ +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.config.JpaConfig; import 
ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.entity.Search; @@ -52,16 +53,16 @@ public class PersistedJpaBundleProviderFactory { return (PersistedJpaBundleProvider) retVal; } - public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchTask theTask, ISearchBuilder theSearchBuilder) { - return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder); + public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchTask theTask, ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) { + return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder, theRequestPartitionId); } - public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset) { - return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null); + public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, RequestPartitionId theRequestPartitionId) { + return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null, theRequestPartitionId); } - public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType) { + public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType, RequestPartitionId theRequestPartitionId) { String resourceName = defaultIfBlank(theResourceType, null); Search search = new Search(); @@ -76,7 +77,10 @@ public class PersistedJpaBundleProviderFactory { search.setStatus(SearchStatusEnum.FINISHED); search.setHistorySearchStyle(searchParameterType); - return newInstance(theRequest, search); + PersistedJpaBundleProvider provider = newInstance(theRequest, search); + provider.setRequestPartitionId(theRequestPartitionId); + + return provider; } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java index 10b7e7da280..3169170700a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java @@ -20,17 +20,17 @@ package ca.uhn.fhir.jpa.search; * #L% */ +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.dao.ISearchBuilder; import ca.uhn.fhir.jpa.entity.Search; +import ca.uhn.fhir.jpa.entity.SearchTypeEnum; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.search.SearchStatusEnum; import ca.uhn.fhir.jpa.search.builder.tasks.SearchTask; import 
ca.uhn.fhir.jpa.util.QueryParameterUtils; -import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -48,11 +48,15 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl /** * Constructor */ - public PersistedJpaSearchFirstPageBundleProvider(Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder, RequestDetails theRequest) { + public PersistedJpaSearchFirstPageBundleProvider(Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { super(theRequest, theSearch.getUuid()); + + assert theSearch.getSearchType() != SearchTypeEnum.HISTORY; + setSearchEntity(theSearch); mySearchTask = theSearchTask; mySearchBuilder = theSearchBuilder; + super.setRequestPartitionId(theRequestPartitionId); } @Nonnull @@ -67,9 +71,12 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl final List pids = mySearchTask.getResourcePids(theFromIndex, theToIndex); ourLog.trace("Done fetching search resource PIDs"); - List retVal = myTxService.withRequest(myRequest).execute(() -> { - return toResourceList(mySearchBuilder, pids); - }); + RequestPartitionId requestPartitionId = getRequestPartitionId(); + + List retVal = myTxService + .withRequest(myRequest) + .withRequestPartitionId(requestPartitionId) + .execute(() -> toResourceList(mySearchBuilder, pids)); long totalCountWanted = theToIndex - theFromIndex; long totalCountMatch = (int) retVal diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java index 16916368829..0830789e38c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java @@ -69,7 +69,7 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.time.DateUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.BeanFactory; -import org.springframework.data.domain.AbstractPageRequest; +import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.stereotype.Component; @@ -108,7 +108,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { private final SearchBuilderFactory mySearchBuilderFactory; private final ISynchronousSearchSvc mySynchronousSearchSvc; private final PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory; - private final IRequestPartitionHelperSvc myRequestPartitionHelperService; private final ISearchParamRegistry mySearchParamRegistry; private final SearchStrategyFactory mySearchStrategyFactory; private final ExceptionService myExceptionSvc; @@ -154,7 +153,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { mySearchBuilderFactory = theSearchBuilderFactory; mySynchronousSearchSvc = theSynchronousSearchSvc; myPersistedJpaBundleProviderFactory = thePersistedJpaBundleProviderFactory; - 
myRequestPartitionHelperService = theRequestPartitionHelperService; mySearchParamRegistry = theSearchParamRegistry; mySearchStrategyFactory = theSearchStrategyFactory; myExceptionSvc = theExceptionSvc; @@ -201,9 +199,19 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { /** * This method is called by the HTTP client processing thread in order to * fetch resources. + *
    + * This method must not be called from inside a transaction. The rationale is that + * the {@link Search} entity is treated as a piece of shared state across client threads + * accessing the same thread, so we need to be able to update that table in a transaction + * and commit it right away in order for that to work. Examples of needing to do this + * include if two different clients request the same search and are both paging at the + * same time, but also includes clients that are hacking the paging links to + * fetch multiple pages of a search result in parallel. In both cases we need to only + * let one of them actually activate the search, or we will have conficts. The other thread + * just needs to wait until the first one actually fetches more results. */ @Override - public List getResources(final String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails) { + public List getResources(final String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { assert !TransactionSynchronizationManager.isActualTransactionActive(); // If we're actively searching right now, don't try to do anything until at least one batch has been @@ -240,13 +248,12 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { Callable searchCallback = () -> mySearchCacheSvc .fetchByUuid(theUuid) .orElseThrow(() -> myExceptionSvc.newUnknownSearchException(theUuid)); - if (theRequestDetails != null) { - search = myTxService.withRequest(theRequestDetails).execute(searchCallback); - } else { - search = HapiTransactionService.invokeCallableAndHandleAnyException(searchCallback); - } - + search = myTxService + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(searchCallback); QueryParameterUtils.verifySearchHasntFailedOrThrowInternalErrorException(search); + if (search.getStatus() == SearchStatusEnum.FINISHED) { ourLog.trace("Search entity marked as finished with {} results", search.getNumFound()); break; @@ -272,7 +279,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { String resourceType = search.getResourceType(); SearchParameterMap params = search.getSearchParameterMap().orElseThrow(() -> new IllegalStateException("No map in PASSCOMPLET search")); IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(resourceType); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequestDetails, resourceType, params, null); SearchTaskParameters parameters = new SearchTaskParameters( search, @@ -280,7 +286,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { params, resourceType, theRequestDetails, - requestPartitionId, + theRequestPartitionId, myOnRemoveSearchTask, mySyncSize ); @@ -299,7 +305,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { ourLog.trace("Finished looping"); - List pids = fetchResultPids(theUuid, theFrom, theTo, theRequestDetails, search); + List pids = fetchResultPids(theUuid, theFrom, theTo, theRequestDetails, search, theRequestPartitionId); ourLog.trace("Fetched {} results", pids.size()); @@ -307,8 +313,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { } @Nonnull - private List fetchResultPids(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, Search theSearch) { - List pids = myTxService.withRequest(theRequestDetails).execute(() -> 
mySearchResultCacheSvc.fetchResultPids(theSearch, theFrom, theTo)); + private List fetchResultPids(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, Search theSearch, RequestPartitionId theRequestPartitionId) { + List pids = mySearchResultCacheSvc.fetchResultPids(theSearch, theFrom, theTo, theRequestDetails, theRequestPartitionId); if (pids == null) { throw myExceptionSvc.newUnknownSearchException(theUuid); } @@ -325,7 +331,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { Search search = new Search(); QueryParameterUtils.populateSearchEntity(theParams, theResourceType, searchUuid, queryString, search, theRequestPartitionId); - myStorageInterceptorHooks.callStoragePresearchRegistered(theRequestDetails, theParams, search); + myStorageInterceptorHooks.callStoragePresearchRegistered(theRequestDetails, theParams, search, theRequestPartitionId); validateSearch(theParams); @@ -483,7 +489,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { myIdToSearchTask.put(theSearch.getUuid(), task); task.call(); - PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb); + PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb, theRequestPartitionId); ourLog.debug("Search initial phase completed in {}ms", w.getMillis()); return retVal; @@ -492,34 +498,37 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { @Nullable private PersistedJpaBundleProvider findCachedQuery(SearchParameterMap theParams, String theResourceType, RequestDetails theRequestDetails, String theQueryString, RequestPartitionId theRequestPartitionId) { // May be null - return myTxService.withRequest(theRequestDetails).execute(() -> { + return myTxService + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { - // Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH - HookParams params = new HookParams() - .add(SearchParameterMap.class, theParams) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); - Object outcome = CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params); - if (Boolean.FALSE.equals(outcome)) { - return null; - } + // Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH + HookParams params = new HookParams() + .add(SearchParameterMap.class, theParams) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + Object outcome = CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params); + if (Boolean.FALSE.equals(outcome)) { + return null; + } - // Check for a search matching the given hash - Search searchToUse = findSearchToUseOrNull(theQueryString, theResourceType, theRequestPartitionId); - if (searchToUse == null) { - return null; - } + // Check for a search matching the given hash + Search searchToUse = findSearchToUseOrNull(theQueryString, theResourceType, theRequestPartitionId); + if (searchToUse == null) { + return null; + } - ourLog.debug("Reusing search {} from cache", searchToUse.getUuid()); - // Interceptor call: JPA_PERFTRACE_SEARCH_REUSING_CACHED - params = 
new HookParams() - .add(SearchParameterMap.class, theParams) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params); + ourLog.debug("Reusing search {} from cache", searchToUse.getUuid()); + // Interceptor call: JPA_PERFTRACE_SEARCH_REUSING_CACHED + params = new HookParams() + .add(SearchParameterMap.class, theParams) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params); - return myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, searchToUse.getUuid()); - }); + return myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, searchToUse.getUuid()); + }); } @Nullable @@ -563,7 +572,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { int pageIndex = theFromIndex / pageSize; - Pageable page = new AbstractPageRequest(pageIndex, pageSize) { + return new PageRequest(pageIndex, pageSize, Sort.unsorted()) { private static final long serialVersionUID = 1L; @Override @@ -571,33 +580,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { return theFromIndex; } - @Override - public Sort getSort() { - return Sort.unsorted(); - } - - @Override - public Pageable next() { - return null; - } - - @Override - public Pageable previous() { - return null; - } - - @Override - public Pageable first() { - return null; - } - - @Override - public Pageable withPage(int theI) { - return null; - } }; - - return page; } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java index e0c77d25840..2b535f2be5e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java @@ -36,6 +36,7 @@ import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.api.Constants; @@ -85,6 +86,9 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc { @Autowired private EntityManager myEntityManager; + @Autowired + private IRequestPartitionHelperSvc myRequestPartitionHelperSvc; + private int mySyncSize = 250; @Override @@ -97,8 +101,11 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc { boolean wantCount = theParamWantOnlyCount || theParamOrConfigWantCount; // Execute the query and make sure we return distinct results - - return myTxService.withRequest(theRequestDetails).readOnly().execute(() -> { + return myTxService + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .readOnly() + .execute(() -> { // Load the results synchronously final List pids = new ArrayList<>(); diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index 7184310d9ab..93779296013 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -878,7 +878,7 @@ public class SearchBuilder implements ISearchBuilder { resourceId.setVersion(version); if (version != null && !version.equals(next.getVersion())) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType); - next = dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null); + next = (IBaseResourceEntity) dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java index 76cfbf3790c..ff021676441 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java @@ -23,8 +23,8 @@ package ca.uhn.fhir.jpa.search.builder; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.entity.Search; -import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; @@ -46,13 +46,15 @@ public class StorageInterceptorHooksFacade { * @param theRequestDetails * @param theParams * @param search + * @param theRequestPartitionId */ - public void callStoragePresearchRegistered(RequestDetails theRequestDetails, SearchParameterMap theParams, Search search) { + public void callStoragePresearchRegistered(RequestDetails theRequestDetails, SearchParameterMap theParams, Search search, RequestPartitionId theRequestPartitionId) { HookParams params = new HookParams() .add(ICachedSearchDetails.class, search) .add(RequestDetails.class, theRequestDetails) .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(SearchParameterMap.class, theParams); + .add(SearchParameterMap.class, theParams) + .add(RequestPartitionId.class, theRequestPartitionId); CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params); } //private IInterceptorBroadcaster myInterceptorBroadcaster; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java index 6d4bec2f6a1..f766c8dffc0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java @@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.search.builder.tasks; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import 
ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; import ca.uhn.fhir.jpa.model.dao.JpaPid; @@ -74,8 +75,12 @@ public class SearchContinuationTask extends SearchTask { @Override public Void call() { try { - myTxService.withRequest(myRequestDetails).execute(() -> { - List previouslyAddedResourcePids = mySearchResultCacheSvc.fetchAllResultPids(getSearch()); + RequestPartitionId requestPartitionId = getRequestPartitionId(); + myTxService + .withRequest(myRequestDetails) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + List previouslyAddedResourcePids = mySearchResultCacheSvc.fetchAllResultPids(getSearch(), myRequestDetails, requestPartitionId); if (previouslyAddedResourcePids == null) { throw myExceptionSvc.newUnknownSearchException(getSearch().getUuid()); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java index 759457b8d3f..f401a5bfcff 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java @@ -92,7 +92,10 @@ import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; public class SearchTask implements Callable { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchTask.class); - + // injected beans + protected final HapiTransactionService myTxService; + protected final FhirContext myContext; + protected final ISearchResultCacheSvc mySearchResultCacheSvc; private final SearchParameterMap myParams; private final IDao myCallingDao; private final String myResourceType; @@ -104,6 +107,14 @@ public class SearchTask implements Callable { private final RequestPartitionId myRequestPartitionId; private final SearchRuntimeDetails mySearchRuntimeDetails; private final Transaction myParentTransaction; + private final Consumer myOnRemove; + private final int mySyncSize; + private final Integer myLoadingThrottleForUnitTests; + private final IInterceptorBroadcaster myInterceptorBroadcaster; + private final SearchBuilderFactory mySearchBuilderFactory; + private final JpaStorageSettings myStorageSettings; + private final ISearchCacheSvc mySearchCacheSvc; + private final IPagingProvider myPagingProvider; private Search mySearch; private boolean myAbortRequested; private int myCountSavedTotal = 0; @@ -112,22 +123,6 @@ public class SearchTask implements Callable { private boolean myAdditionalPrefetchThresholdsRemaining; private List myPreviouslyAddedResourcePids; private Integer myMaxResultsToFetch; - - private final Consumer myOnRemove; - - private final int mySyncSize; - private final Integer myLoadingThrottleForUnitTests; - - // injected beans - protected final HapiTransactionService myTxService; - protected final FhirContext myContext; - private final IInterceptorBroadcaster myInterceptorBroadcaster; - private final SearchBuilderFactory mySearchBuilderFactory; - protected final ISearchResultCacheSvc mySearchResultCacheSvc; - private final JpaStorageSettings myStorageSettings; - private final ISearchCacheSvc mySearchCacheSvc; - private final IPagingProvider myPagingProvider; - /** * Constructor */ @@ -169,6 +164,10 @@ public class SearchTask implements Callable { myParentTransaction = ElasticApm.currentTransaction(); } + protected RequestPartitionId getRequestPartitionId() { + return myRequestPartitionId; + } + /** 
* This method is called by the server HTTP thread, and * will block until at least one page of results have been @@ -267,11 +266,18 @@ public class SearchTask implements Callable { } public void saveSearch() { - myTxService.execute(myRequest, null, Propagation.REQUIRES_NEW, Isolation.DEFAULT, ()->doSaveSearch()); + myTxService + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .withPropagation(Propagation.REQUIRES_NEW) + .execute(() -> doSaveSearch()); } private void saveUnsynced(final IResultIterator theResultIter) { - myTxService.withRequest(myRequest).execute(()->{ + myTxService + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .execute(() -> { if (mySearch.getId() == null) { doSaveSearch(); } @@ -303,7 +309,7 @@ public class SearchTask implements Callable { // Actually store the results in the query cache storage myCountSavedTotal += unsyncedPids.size(); myCountSavedThisPass += unsyncedPids.size(); - mySearchResultCacheSvc.storeResults(mySearch, mySyncedPids, unsyncedPids); + mySearchResultCacheSvc.storeResults(mySearch, mySyncedPids, unsyncedPids, myRequest, getRequestPartitionId()); synchronized (mySyncedPids) { int numSyncedThisPass = unsyncedPids.size(); @@ -387,7 +393,11 @@ public class SearchTask implements Callable { // Create an initial search in the DB and give it an ID saveSearch(); - myTxService.execute(myRequest, null, Propagation.REQUIRED, Isolation.READ_COMMITTED, ()->doSearch()); + myTxService + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .withIsolation(Isolation.READ_COMMITTED) + .execute(() -> doSearch()); mySearchRuntimeDetails.setSearchStatus(mySearch.getStatus()); if (mySearch.getStatus() == SearchStatusEnum.FINISHED) { @@ -506,13 +516,16 @@ public class SearchTask implements Callable { ourLog.trace("Got count {}", count); - myTxService.withRequest(myRequest).execute(()->{ - mySearch.setTotalCount(count.intValue()); - if (myParamWantOnlyCount) { - mySearch.setStatus(SearchStatusEnum.FINISHED); - } - doSaveSearch(); - }); + myTxService + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .execute(() -> { + mySearch.setTotalCount(count.intValue()); + if (myParamWantOnlyCount) { + mySearch.setStatus(SearchStatusEnum.FINISHED); + } + doSaveSearch(); + }); if (myParamWantOnlyCount) { return; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java index 6e1d5128a1f..52e9bce687d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java @@ -111,7 +111,6 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc { } @Override - @Transactional(propagation = Propagation.NEVER) public Optional tryToMarkSearchAsInProgress(Search theSearch) { ourLog.trace("Going to try to change search status from {} to {}", theSearch.getStatus(), SearchStatusEnum.LOADING); try { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java index 907715501c2..027745081b0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java @@ -20,17 +20,18 @@ package ca.uhn.fhir.jpa.search.cache; * #L% */ +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.dao.data.ISearchResultDao; +import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.SearchResult; import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.rest.api.server.RequestDetails; import com.google.common.collect.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Pageable; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; import java.util.Collections; import java.util.List; @@ -43,50 +44,65 @@ public class DatabaseSearchResultCacheSvcImpl implements ISearchResultCacheSvc { @Autowired private ISearchResultDao mySearchResultDao; + @Autowired + private IHapiTransactionService myTransactionService; + @Override - @Transactional(propagation = Propagation.REQUIRED) - public List fetchResultPids(Search theSearch, int theFrom, int theTo) { - final Pageable page = toPage(theFrom, theTo); - if (page == null) { - return Collections.emptyList(); - } + public List fetchResultPids(Search theSearch, int theFrom, int theTo, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + return myTransactionService + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { + final Pageable page = toPage(theFrom, theTo); + if (page == null) { + return Collections.emptyList(); + } - List retVal = mySearchResultDao - .findWithSearchPid(theSearch.getId(), page) - .getContent(); + List retVal = mySearchResultDao + .findWithSearchPid(theSearch.getId(), page) + .getContent(); - ourLog.debug("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size()); + ourLog.debug("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size()); - return JpaPid.fromLongList(retVal); + return JpaPid.fromLongList(retVal); + }); } @Override - @Transactional(propagation = Propagation.REQUIRED) - public List fetchAllResultPids(Search theSearch) { - List retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId()); - ourLog.trace("fetchAllResultPids returned {} pids", retVal.size()); - return JpaPid.fromLongList(retVal); + public List fetchAllResultPids(Search theSearch, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + return myTransactionService + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { + List retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId()); + ourLog.trace("fetchAllResultPids returned {} pids", retVal.size()); + return JpaPid.fromLongList(retVal); + }); } @Override - @Transactional(propagation = Propagation.REQUIRED) - public void storeResults(Search theSearch, List thePreviouslyStoredResourcePids, List theNewResourcePids) { - List resultsToSave = Lists.newArrayList(); + public void storeResults(Search theSearch, List thePreviouslyStoredResourcePids, List theNewResourcePids, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + myTransactionService + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { + 
List resultsToSave = Lists.newArrayList(); - ourLog.debug("Storing {} results with {} previous for search", theNewResourcePids.size(), thePreviouslyStoredResourcePids.size()); + ourLog.debug("Storing {} results with {} previous for search", theNewResourcePids.size(), thePreviouslyStoredResourcePids.size()); - int order = thePreviouslyStoredResourcePids.size(); - for (JpaPid nextPid : theNewResourcePids) { - SearchResult nextResult = new SearchResult(theSearch); - nextResult.setResourcePid(nextPid.getId()); - nextResult.setOrder(order); - resultsToSave.add(nextResult); - ourLog.trace("Saving ORDER[{}] Resource {}", order, nextResult.getResourcePid()); + int order = thePreviouslyStoredResourcePids.size(); + for (JpaPid nextPid : theNewResourcePids) { + SearchResult nextResult = new SearchResult(theSearch); + nextResult.setResourcePid(nextPid.getId()); + nextResult.setOrder(order); + resultsToSave.add(nextResult); + ourLog.trace("Saving ORDER[{}] Resource {}", order, nextResult.getResourcePid()); - order++; - } + order++; + } - mySearchResultDao.saveAll(resultsToSave); + mySearchResultDao.saveAll(resultsToSave); + }); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java index 4b4e12aba7b..9880e22ee7e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java @@ -20,8 +20,10 @@ package ca.uhn.fhir.jpa.search.cache; * #L% */ +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.rest.api.server.RequestDetails; import javax.annotation.Nullable; import java.util.List; @@ -30,12 +32,12 @@ public interface ISearchResultCacheSvc { /** * @param theSearch The search - This method is not required to persist any chances to the Search object, it is only provided here for identification * @param thePreviouslyStoredResourcePids A list of resource PIDs that have previously been saved to this search - * @param theNewResourcePids A list of new resoure PIDs to add to this search (these ones have not been previously saved) + * @param theNewResourcePids A list of new resource PIDs to add to this search (these ones have not been previously saved) */ - void storeResults(Search theSearch, List thePreviouslyStoredResourcePids, List theNewResourcePids); + void storeResults(Search theSearch, List thePreviouslyStoredResourcePids, List theNewResourcePids, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); /** - * Fetch a sunset of the search result IDs from the cache + * Fetch a subset of the search result IDs from the cache * * @param theSearch The search to fetch IDs for * @param theFrom The starting index (inclusive) @@ -44,7 +46,7 @@ public interface ISearchResultCacheSvc { * have been removed from the cache for some reason, such as expiry or manual purge) */ @Nullable - List fetchResultPids(Search theSearch, int theFrom, int theTo); + List fetchResultPids(Search theSearch, int theFrom, int theTo, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); /** * Fetch all result PIDs for a given search with no particular order required @@ -54,6 +56,6 @@ public interface ISearchResultCacheSvc { * have been removed from the cache for some reason, such as expiry or manual 
purge) */ @Nullable - List fetchAllResultPids(Search theSearch); + List fetchAllResultPids(Search theSearch, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java new file mode 100644 index 00000000000..826c64b4dca --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java @@ -0,0 +1,45 @@ +package ca.uhn.fhir.jpa.util; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import org.springframework.orm.jpa.JpaDialect; +import org.springframework.orm.jpa.JpaTransactionManager; +import org.springframework.orm.jpa.vendor.HibernateJpaDialect; + +public class JpaHapiTransactionService extends HapiTransactionService { + + private volatile Boolean myCustomIsolationSupported; + + @Override + public boolean isCustomIsolationSupported() { + if (myCustomIsolationSupported == null) { + if (myTransactionManager instanceof JpaTransactionManager) { + JpaDialect jpaDialect = ((JpaTransactionManager) myTransactionManager).getJpaDialect(); + myCustomIsolationSupported = (jpaDialect instanceof HibernateJpaDialect); + } else { + myCustomIsolationSupported = false; + } + } + return myCustomIsolationSupported; + } + +} diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml index eb3c5855045..0f98464e7b1 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml index f448ee6a1fa..6a26fd72124 100644 --- a/hapi-fhir-jpaserver-ips/pom.xml +++ b/hapi-fhir-jpaserver-ips/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index aa8b77635bd..d737529f485 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java index 5440057bd78..98c2059ad4e 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java @@ -44,7 +44,6 @@ import 
ca.uhn.fhir.mdm.model.MdmTransactionContext; import ca.uhn.fhir.mdm.provider.MdmControllerHelper; import ca.uhn.fhir.mdm.provider.MdmControllerUtil; import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.provider.ProviderConstants; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; @@ -125,9 +124,8 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { } @Override - public Page queryLinks(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext, RequestDetails theRequestDetails) - { - RequestPartitionId theReadPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, null); + public Page queryLinks(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext, RequestDetails theRequestDetails) { + RequestPartitionId theReadPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null); Page resultPage; if (theReadPartitionId.hasPartitionIds()) { theMdmQuerySearchParameters.setPartitionIds(theReadPartitionId.getPartitionIds()); @@ -166,12 +164,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { @Override public Page getDuplicateGoldenResources(MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest, RequestDetails theRequestDetails, String theRequestResourceType) { Page resultPage; - RequestPartitionId readPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, null); + RequestPartitionId readPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null); - if (readPartitionId.isAllPartitions()){ - resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, null, theRequestResourceType); + if (readPartitionId.isAllPartitions()) { + resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, null, theRequestResourceType); } else { - resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, readPartitionId.getPartitionIds(), theRequestResourceType); + resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, readPartitionId.getPartitionIds(), theRequestResourceType); } validateMdmQueryPermissions(readPartitionId, resultPage.getContent(), theRequestDetails); @@ -205,12 +203,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { public IBaseParameters submitMdmClearJob(@Nonnull List theResourceNames, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails) { MdmClearJobParameters params = new MdmClearJobParameters(); params.setResourceNames(theResourceNames); - if (theBatchSize != null && theBatchSize.getValue() !=null && theBatchSize.getValue().longValue() > 0) { + if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue().longValue() > 0) { params.setBatchSize(theBatchSize.getValue().intValue()); } - ReadPartitionIdRequestDetails details= new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, null, null, null); - RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, details); + ReadPartitionIdRequestDetails details = 
ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_MDM_CLEAR); + RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); params.setRequestPartitionId(requestPartition); JobInstanceStartRequest request = new JobInstanceStartRequest(); @@ -226,10 +224,10 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { @Override - public IBaseParameters submitMdmSubmitJob(List theUrls, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails) { + public IBaseParameters submitMdmSubmitJob(List theUrls, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails) { MdmSubmitJobParameters params = new MdmSubmitJobParameters(); - if (theBatchSize != null && theBatchSize.getValue() !=null && theBatchSize.getValue().longValue() > 0) { + if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue().longValue() > 0) { params.setBatchSize(theBatchSize.getValue().intValue()); } params.setRequestPartitionId(RequestPartitionId.allPartitions()); @@ -256,12 +254,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { myIMdmLinkUpdaterSvc.notDuplicateGoldenResource(goldenResource, target, theMdmTransactionContext); } - private void validateMdmQueryPermissions(RequestPartitionId theRequestPartitionId, List theMdmLinkJsonList, RequestDetails theRequestDetails){ + private void validateMdmQueryPermissions(RequestPartitionId theRequestPartitionId, List theMdmLinkJsonList, RequestDetails theRequestDetails) { Set seenResourceTypes = new HashSet<>(); - for (MdmLinkJson mdmLinkJson : theMdmLinkJsonList){ + for (MdmLinkJson mdmLinkJson : theMdmLinkJsonList) { IdDt idDt = new IdDt(mdmLinkJson.getSourceId()); - if (!seenResourceTypes.contains(idDt.getResourceType())){ + if (!seenResourceTypes.contains(idDt.getResourceType())) { myRequestPartitionHelperSvc.validateHasPartitionPermissions(theRequestDetails, idDt.getResourceType(), theRequestPartitionId); seenResourceTypes.add(idDt.getResourceType()); } diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderMatchR4Test.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderMatchR4Test.java index b3537378e63..b0db3b2674c 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderMatchR4Test.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderMatchR4Test.java @@ -36,7 +36,7 @@ public class MdmProviderMatchR4Test extends BaseProviderR4Test { } @Test - public void testMatch() throws Exception { + public void testMatch() { Patient jane = buildJanePatient(); jane.setActive(true); Patient createdJane = createPatient(jane); diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImplTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImplTest.java index bf6b348b92a..25f39dde818 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImplTest.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImplTest.java @@ -32,6 +32,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.ArgumentMatcher; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.mock.mockito.SpyBean; 
import org.springframework.data.domain.Page; @@ -41,6 +43,7 @@ import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; import static ca.uhn.fhir.mdm.provider.MdmProviderDstu3Plus.DEFAULT_PAGE_SIZE; import static ca.uhn.fhir.mdm.provider.MdmProviderDstu3Plus.MAX_PAGE_SIZE; @@ -50,6 +53,8 @@ import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.ArgumentMatchers.eq; public class MdmControllerSvcImplTest extends BaseLinkR4Test { + private static final Logger ourLog = LoggerFactory.getLogger(MdmControllerSvcImplTest.class); + @Autowired IMdmControllerSvc myMdmControllerSvc; @@ -137,13 +142,18 @@ public class MdmControllerSvcImplTest extends BaseLinkR4Test { assertEquals(MdmLinkSourceEnum.AUTO, link.getLinkSource()); assertLinkCount(2); + runInTransaction(()->{ + ourLog.info("Links: {}", myMdmLinkDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * "))); + }); + + myCaptureQueriesListener.clear(); Page resultPage = myMdmControllerSvc.getDuplicateGoldenResources(null, new MdmPageRequest((Integer) null, null, DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE), new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.fromPartitionId(1)), null); + myCaptureQueriesListener.logSelectQueries(); - assertEquals(resultPage.getContent().size(), 1); - - assertEquals(resultPage.getContent().get(0).getSourceId(), patient.getIdElement().getResourceType() + "/" + patient.getIdElement().getIdPart()); + assertEquals(1, resultPage.getContent().size()); + assertEquals(patient.getIdElement().getResourceType() + "/" + patient.getIdElement().getIdPart(), resultPage.getContent().get(0).getSourceId()); Mockito.verify(myRequestPartitionHelperSvc, Mockito.atLeastOnce()).validateHasPartitionPermissions(any(), eq("Patient"), argThat(new PartitionIdMatcher(requestPartitionId))); } diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index a77c5d691cc..fff6fd08200 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java index ab1b9fba0fc..02e94f816be 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java @@ -30,6 +30,25 @@ public class PartitionSettings { private boolean myIncludePartitionInSearchHashes = false; private boolean myUnnamedPartitionMode; private Integer myDefaultPartitionId; + private boolean myAlwaysOpenNewTransactionForDifferentPartition; + + /** + * Should we always open a new database transaction if the partition context changes + * + * @since 6.6.0 + */ + public boolean isAlwaysOpenNewTransactionForDifferentPartition() { + return myAlwaysOpenNewTransactionForDifferentPartition; + } + + /** + * Should we always open a new database transaction if the partition context changes + * + * @since 6.6.0 + */ + public void setAlwaysOpenNewTransactionForDifferentPartition(boolean theAlwaysOpenNewTransactionForDifferentPartition) { + myAlwaysOpenNewTransactionForDifferentPartition = theAlwaysOpenNewTransactionForDifferentPartition; + } /** * If set to 
true (default is false) the PARTITION_ID value will be factored into the @@ -136,6 +155,12 @@ public class PartitionSettings { myDefaultPartitionId = theDefaultPartitionId; } + /** + * If enabled the JPA server will allow unqualified cross partition reference + */ + public boolean isAllowUnqualifiedCrossPartitionReference() { + return myAllowReferencesAcrossPartitions.equals(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); + } public enum CrossPartitionReferenceMode { @@ -145,18 +170,11 @@ public class PartitionSettings { NOT_ALLOWED, /** - * References can cross partition boundaries, in a way that hides the existence of partitions to the end user + * References can cross partition boundaries, with an assumption that boundaries + * will be managed by the database. */ - ALLOWED_UNQUALIFIED + ALLOWED_UNQUALIFIED, } - /** - * If enabled the JPA server will allow unqualified cross partition reference - * - */ - public boolean isAllowUnqualifiedCrossPartitionReference() { - return myAllowReferencesAcrossPartitions.equals(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); - } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java index 09d3fec755e..f57ab77846f 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java @@ -20,9 +20,10 @@ package ca.uhn.fhir.jpa.model.cross; * #L% */ +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import org.hl7.fhir.instance.model.api.IIdType; -public interface IBasePersistedResource extends IResourceLookup { +public interface IBasePersistedResource> extends IResourceLookup { IIdType getIdDt(); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java index 952cd84c82a..cb3c5a5597f 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java @@ -24,7 +24,7 @@ import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import java.util.Date; -public interface IResourceLookup { +public interface IResourceLookup> { String getResourceType(); /** diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java index f3465520aa9..b25eabd30cd 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java @@ -65,8 +65,12 @@ public class HapiSequenceStyleGenerator implements IdentifierGenerator, Persiste @Override public Serializable generate(SharedSessionContractImplementor theSession, Object theObject) throws HibernateException { - Long next = (Long) myGen.generate(theSession, theObject); - return myIdMassager.massage(myGeneratorName, next); + Long retVal = myIdMassager.generate(myGeneratorName); + if (retVal == null) { + Long next = (Long) myGen.generate(theSession, theObject); + retVal = myIdMassager.massage(myGeneratorName, next); 
+ } + return retVal; } @Override diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java index b46ca79350a..bf940b2e47f 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java @@ -20,6 +20,8 @@ package ca.uhn.fhir.jpa.model.dialect; * #L% */ +import javax.annotation.Nullable; + /** * This is an internal API and may change or disappear without notice * @@ -29,6 +31,17 @@ public interface ISequenceValueMassager { Long massage(String theGeneratorName, Long theId); + /** + * If desired, the massager can supply an ID on its own. This method is tried first, + * and if it returns null, the normal hi-lo generator is called and then + * {@link #massage(String, Long)} is called on the output of that. + * + * @return Returns an ID or null + */ + @Nullable + default Long generate(String theGeneratorName) { + return null; + } final class NoopSequenceValueMassager implements ISequenceValueMassager { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java index 556b51d7e25..2d0d95ddbe0 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java @@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.model.entity; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.model.primitive.InstantDt; import org.hibernate.annotations.OptimisticLock; @@ -38,8 +39,10 @@ import java.util.Date; import static org.apache.commons.lang3.StringUtils.defaultString; @MappedSuperclass -public abstract class BaseHasResource extends BasePartitionable implements IBaseResourceEntity, IBasePersistedResource { +public abstract class BaseHasResource extends BasePartitionable implements IBaseResourceEntity, IBasePersistedResource { + public static final String RES_PUBLISHED = "RES_PUBLISHED"; + public static final String RES_UPDATED = "RES_UPDATED"; @Column(name = "RES_DELETED_AT", nullable = true) @Temporal(TemporalType.TIMESTAMP) private Date myDeleted; @@ -54,12 +57,12 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase private boolean myHasTags; @Temporal(TemporalType.TIMESTAMP) - @Column(name = "RES_PUBLISHED", nullable = false) + @Column(name = RES_PUBLISHED, nullable = false) @OptimisticLock(excluded = true) private Date myPublished; @Temporal(TemporalType.TIMESTAMP) - @Column(name = "RES_UPDATED", nullable = false) + @Column(name = RES_UPDATED, nullable = false) @OptimisticLock(excluded = true) private Date myUpdated; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java index 389d1c18ef3..9d5bef4c2bb 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java @@ -21,6 +21,7 @@ package 
ca.uhn.fhir.jpa.model.entity; */ import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; @@ -118,4 +119,14 @@ public class PartitionablePartitionId implements Cloneable { return RequestPartitionId.defaultPartition(); } } + + @Nonnull + public static PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId, @Nonnull PartitionSettings thePartitionSettings) { + Integer partitionId = theRequestPartitionId.getFirstPartitionIdOrNull(); + if (partitionId == null) { + partitionId = thePartitionSettings.getDefaultPartitionId(); + } + return new PartitionablePartitionId(partitionId, theRequestPartitionId.getPartitionDate()); + } + } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java index 6a09380f77a..4dda8e4898e 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java @@ -23,9 +23,6 @@ package ca.uhn.fhir.jpa.model.entity; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.parser.IParser; -/** - * @see ResourceHistoryTable#ENCODING_COL_LENGTH - */ public enum ResourceEncodingEnum { /* @@ -47,7 +44,13 @@ public enum ResourceEncodingEnum { /** * Resource was deleted - No contents expected */ - DEL; + DEL, + + /** + * Externally stored resource - Resource text is a reference to an external storage location, + * which will be stored in {@link ResourceHistoryTable#getResourceTextVc()} + */ + ESR; public IParser newParser(FhirContext theContext) { return theContext.newJsonParser(); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java index 40f902926ec..30a940970d3 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java @@ -92,6 +92,7 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl @Column(name = "RES_TEXT_VC", length = RES_TEXT_VC_MAX_LENGTH, nullable = true) @OptimisticLock(excluded = true) private String myResourceTextVc; + @Column(name = "RES_ENCODING", nullable = false, length = ENCODING_COL_LENGTH) @Enumerated(EnumType.STRING) @OptimisticLock(excluded = true) diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java index 9e26ac1392d..6c4bfa0bf42 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java @@ -98,6 +98,9 @@ public class ResourceLink extends BaseResourceIndex { @Transient private transient String myTargetResourceId; + /** + * Constructor + */ public ResourceLink() { super(); } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java index 
6ab566718d1..370de32fc4f 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java @@ -78,16 +78,18 @@ import java.util.stream.Collectors; @Indexed(routingBinder= @RoutingBinderRef(type = ResourceTableRoutingBinder.class)) @Entity -@Table(name = "HFJ_RESOURCE", uniqueConstraints = {}, indexes = { +@Table(name = ResourceTable.HFJ_RESOURCE, uniqueConstraints = {}, indexes = { // Do not reuse previously used index name: IDX_INDEXSTATUS, IDX_RES_TYPE - @Index(name = "IDX_RES_DATE", columnList = "RES_UPDATED"), + @Index(name = "IDX_RES_DATE", columnList = BaseHasResource.RES_UPDATED), @Index(name = "IDX_RES_TYPE_DEL_UPDATED", columnList = "RES_TYPE,RES_DELETED_AT,RES_UPDATED,PARTITION_ID,RES_ID"), }) @NamedEntityGraph(name = "Resource.noJoins") -public class ResourceTable extends BaseHasResource implements Serializable, IBasePersistedResource, IResourceLookup { +public class ResourceTable extends BaseHasResource implements Serializable, IBasePersistedResource { public static final int RESTYPE_LEN = 40; private static final int MAX_LANGUAGE_LENGTH = 20; private static final long serialVersionUID = 1L; + public static final String HFJ_RESOURCE = "HFJ_RESOURCE"; + public static final String RES_TYPE = "RES_TYPE"; /** * Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB @@ -263,7 +265,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @OptimisticLock(excluded = true) private Collection myResourceLinksAsTarget; - @Column(name = "RES_TYPE", length = RESTYPE_LEN, nullable = false) + @Column(name = RES_TYPE, length = RESTYPE_LEN, nullable = false) @FullTextField @OptimisticLock(excluded = true) private String myResourceType; @@ -769,7 +771,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas } @Override - public IResourcePersistentId getPersistentId() { + public JpaPid getPersistentId() { return JpaPid.fromId(getId()); } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java index 9591becee61..84ac8db7257 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java @@ -42,6 +42,10 @@ import java.util.Set; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; +/** + * This class contains configuration options common to all hapi-fhir-storage implementations. 
+ * Ultimately it should live in that project + */ public class StorageSettings { /** * @since 5.6.0 diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index bc6152c23b7..02300acc9cf 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java similarity index 51% rename from hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java rename to hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java index 7611b7e1f23..39dd974dd83 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.interceptor.model; /*- * #%L - * HAPI FHIR - Core Library + * HAPI FHIR Search Parameters * %% * Copyright (C) 2014 - 2023 Smile CDR, Inc. * %% @@ -20,31 +20,52 @@ package ca.uhn.fhir.interceptor.model; * #L% */ +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; +import javax.annotation.Nonnull; import javax.annotation.Nullable; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +/** + * This is a model class used as a parameter for the interceptor pointcut + * {@link ca.uhn.fhir.interceptor.api.Pointcut#STORAGE_PARTITION_IDENTIFY_READ}. + */ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails { private final String myResourceType; private final RestOperationTypeEnum myRestOperationType; private final IIdType myReadResourceId; - private final Object mySearchParams; + @Nullable + private final SearchParameterMap mySearchParams; + @Nullable private final IBaseResource myConditionalTargetOrNull; + @Nullable + private final String mySearchUuid; + @Nullable + private final String myExtendedOperationName; - public ReadPartitionIdRequestDetails(String theResourceType, RestOperationTypeEnum theRestOperationType, IIdType theReadResourceId, Object theSearchParams, @Nullable IBaseResource theConditionalTargetOrNull) { + private ReadPartitionIdRequestDetails(String theResourceType, RestOperationTypeEnum theRestOperationType, IIdType theReadResourceId, @Nullable SearchParameterMap theSearchParams, @Nullable IBaseResource theConditionalTargetOrNull, @Nullable String theSearchUuid, String theExtendedOperationName) { myResourceType = theResourceType; myRestOperationType = theRestOperationType; myReadResourceId = theReadResourceId; mySearchParams = theSearchParams; myConditionalTargetOrNull = theConditionalTargetOrNull; + mySearchUuid = theSearchUuid; + myExtendedOperationName = theExtendedOperationName; } - public static ReadPartitionIdRequestDetails forRead(String theResourceType, IIdType theId, boolean theIsVread) { - RestOperationTypeEnum op = theIsVread ? 
RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ; - return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null); + @Nullable + public String getExtendedOperationName() { + return myExtendedOperationName; + } + + @Nullable + public String getSearchUuid() { + return mySearchUuid; } public String getResourceType() { @@ -59,18 +80,45 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails { return myReadResourceId; } - public Object getSearchParams() { + @Nullable + public SearchParameterMap getSearchParams() { return mySearchParams; } + @Nullable public IBaseResource getConditionalTargetOrNull() { return myConditionalTargetOrNull; } + /** + * @param theId The resource ID (must include a resource type and ID) + */ + public static ReadPartitionIdRequestDetails forRead(IIdType theId) { + assert isNotBlank(theId.getResourceType()); + assert isNotBlank(theId.getIdPart()); + return forRead(theId.getResourceType(), theId, false); + } + public static ReadPartitionIdRequestDetails forOperation(@Nullable String theResourceType, @Nullable IIdType theId, @Nonnull String theExtendedOperationName) { + RestOperationTypeEnum op; + if (theId != null) { + op = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE; + } else if (theResourceType != null) { + op = RestOperationTypeEnum.EXTENDED_OPERATION_TYPE; + } else { + op = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE; + } - public static ReadPartitionIdRequestDetails forSearchType(String theResourceType, Object theParams, IBaseResource theConditionalOperationTargetOrNull) { - return new ReadPartitionIdRequestDetails(theResourceType, RestOperationTypeEnum.SEARCH_TYPE, null, theParams, theConditionalOperationTargetOrNull); + return new ReadPartitionIdRequestDetails(theResourceType, op, null, null, null, null, theExtendedOperationName); + } + + public static ReadPartitionIdRequestDetails forRead(String theResourceType, @Nonnull IIdType theId, boolean theIsVread) { + RestOperationTypeEnum op = theIsVread ? 
RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ; + return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null, null, null); + } + + public static ReadPartitionIdRequestDetails forSearchType(String theResourceType, SearchParameterMap theParams, IBaseResource theConditionalOperationTargetOrNull) { + return new ReadPartitionIdRequestDetails(theResourceType, RestOperationTypeEnum.SEARCH_TYPE, null, theParams, theConditionalOperationTargetOrNull, null, null); } public static ReadPartitionIdRequestDetails forHistory(String theResourceType, IIdType theIdType) { @@ -82,6 +130,10 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails { } else { restOperationTypeEnum = RestOperationTypeEnum.HISTORY_SYSTEM; } - return new ReadPartitionIdRequestDetails(theResourceType, restOperationTypeEnum, theIdType, null, null); + return new ReadPartitionIdRequestDetails(theResourceType, restOperationTypeEnum, theIdType, null, null, null, null); + } + + public static ReadPartitionIdRequestDetails forSearchUuid(String theUuid) { + return new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.GET_PAGE, null, null, null, theUuid, null); } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java index b636978994a..e359c626b95 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java @@ -28,8 +28,8 @@ import javax.annotation.Nonnull; import java.util.List; /** - * This interface is used by the {@link IResourceChangeListenerCacheRefresher} to read resources matching the provided - * search parameter map in the repository and compare them to caches stored in the {@link IResourceChangeListenerRegistry}. + * This interface is used by the {@literal IResourceChangeListenerCacheRefresher} to read resources matching the provided + * search parameter map in the repository and compare them to caches stored in the {@literal IResourceChangeListenerRegistry}. 
*/ public interface IResourceVersionSvc { @Nonnull diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java index 98ae5bcfe07..624904341fb 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.cache; * #L% */ -import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.model.primitive.IdDt; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; @@ -44,7 +44,7 @@ public class ResourceVersionMap { private final Map myMap = new HashMap<>(); private ResourceVersionMap() {} - public static ResourceVersionMap fromResourceTableEntities(List theEntities) { + public static ResourceVersionMap fromResourceTableEntities(List theEntities) { ResourceVersionMap retval = new ResourceVersionMap(); theEntities.forEach(entity -> retval.add(entity.getIdDt())); return retval; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java similarity index 81% rename from hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java rename to hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java index 6f7c943b6c1..ea9daa36e77 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.partition; /*- * #%L - * HAPI FHIR Storage api + * HAPI FHIR Search Parameters * %% * Copyright (C) 2014 - 2023 Smile CDR, Inc. 
* %% @@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.partition; import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.RequestDetails; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -35,18 +34,18 @@ import java.util.Set; public interface IRequestPartitionHelperSvc { @Nonnull - RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType, ReadPartitionIdRequestDetails theDetails); + RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails); @Nonnull - default RequestPartitionId determineReadPartitionForRequestForRead(RequestDetails theRequest, String theResourceType, IIdType theId) { + default RequestPartitionId determineReadPartitionForRequestForRead(RequestDetails theRequest, String theResourceType, @Nonnull IIdType theId) { ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forRead(theResourceType, theId, theId.hasVersionIdPart()); - return determineReadPartitionForRequest(theRequest, theResourceType, details); + return determineReadPartitionForRequest(theRequest, details); } @Nonnull default RequestPartitionId determineReadPartitionForRequestForSearchType(RequestDetails theRequest, String theResourceType, SearchParameterMap theParams, IBaseResource theConditionalOperationTargetOrNull) { ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forSearchType(theResourceType, theParams, theConditionalOperationTargetOrNull); - return determineReadPartitionForRequest(theRequest, theResourceType, details); + return determineReadPartitionForRequest(theRequest, details); } RequestPartitionId determineGenericPartitionForRequest(RequestDetails theRequestDetails); @@ -54,18 +53,16 @@ public interface IRequestPartitionHelperSvc { @Nonnull default RequestPartitionId determineReadPartitionForRequestForHistory(RequestDetails theRequest, String theResourceType, IIdType theIdType) { ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(theResourceType, theIdType); - return determineReadPartitionForRequest(theRequest, theResourceType, details); + return determineReadPartitionForRequest(theRequest, details); } @Nonnull - default void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId){} + default void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) { + } @Nonnull RequestPartitionId determineCreatePartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource, @Nonnull String theResourceType); - @Nonnull - PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId); - @Nonnull Set toReadPartitions(@Nonnull RequestPartitionId theRequestPartitionId); diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java index c3f3593cbde..a9e5a36610b 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java +++ 
b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java @@ -121,8 +121,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor private BaseRuntimeChildDefinition myRangeHighValueChild; private BaseRuntimeChildDefinition myAddressLineValueChild; private BaseRuntimeChildDefinition myAddressCityValueChild; - private BaseRuntimeChildDefinition myAddressStateValueChild; private BaseRuntimeChildDefinition myAddressDistrictValueChild; + private BaseRuntimeChildDefinition myAddressStateValueChild; private BaseRuntimeChildDefinition myAddressCountryValueChild; private BaseRuntimeChildDefinition myAddressPostalCodeValueChild; private BaseRuntimeChildDefinition myCapabilityStatementRestSecurityServiceValueChild; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java new file mode 100644 index 00000000000..78d288e67b8 --- /dev/null +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java @@ -0,0 +1,78 @@ +package ca.uhn.fhir.jpa.searchparam.extractor; + +/*- + * #%L + * HAPI FHIR Search Parameters + * %% + * Copyright (C) 2014 - 2023 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; + +import javax.annotation.Nonnull; + +public class CrossPartitionReferenceDetails { + + @Nonnull + private final RequestPartitionId mySourceResourcePartitionId; + @Nonnull + private final PathAndRef myPathAndRef; + @Nonnull + private final RequestDetails myRequestDetails; + @Nonnull + private final TransactionDetails myTransactionDetails; + @Nonnull + private final String mySourceResourceName; + + /** + * Constructor + */ + public CrossPartitionReferenceDetails(@Nonnull RequestPartitionId theSourceResourcePartitionId, @Nonnull String theSourceResourceName, @Nonnull PathAndRef thePathAndRef, @Nonnull RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails) { + mySourceResourcePartitionId = theSourceResourcePartitionId; + mySourceResourceName = theSourceResourceName; + myPathAndRef = thePathAndRef; + myRequestDetails = theRequestDetails; + myTransactionDetails = theTransactionDetails; + } + + @Nonnull + public String getSourceResourceName() { + return mySourceResourceName; + } + + @Nonnull + public RequestDetails getRequestDetails() { + return myRequestDetails; + } + + @Nonnull + public TransactionDetails getTransactionDetails() { + return myTransactionDetails; + } + + @Nonnull + public RequestPartitionId getSourceResourcePartitionId() { + return mySourceResourcePartitionId; + } + + @Nonnull + public PathAndRef getPathAndRef() { + return myPathAndRef; + } + +} diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java index 97aaa45408f..8bdafc19604 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java @@ -31,8 +31,8 @@ import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.cross.IResourceLookup; -import ca.uhn.fhir.jpa.model.entity.BasePartitionable; import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.model.entity.BasePartitionable; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; import ca.uhn.fhir.jpa.model.entity.IResourceIndexComboSearchParameter; import ca.uhn.fhir.jpa.model.entity.StorageSettings; @@ -50,6 +50,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri; import ca.uhn.fhir.jpa.model.entity.ResourceLink; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; @@ -71,8 +72,8 @@ import java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.List; -import java.util.Set; import java.util.Optional; +import java.util.Set; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -93,6 +94,8 @@ public class SearchParamExtractorService { 
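/*
 * Illustrative sketch only (not part of this patch): a minimal consumer-side interceptor for the
 * new cross-partition reference handling. The pointcut name and the CrossPartitionReferenceDetails
 * parameter are taken from the changes in this patch; the class name, the method name, and the
 * assumption that the hook returns an IResourceLookup (inferred from the cast performed in
 * SearchParamExtractorService below) are illustrative.
 */
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails;

@Interceptor
public class ExampleCrossPartitionReferenceInterceptor {

	@Hook(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE)
	public IResourceLookup resolveCrossPartitionReference(CrossPartitionReferenceDetails theDetails) {
		// theDetails exposes the source partition, source resource name, PathAndRef,
		// RequestDetails and TransactionDetails introduced by this patch. A real implementation
		// would resolve the reference target in whatever partition it lives in and return its
		// lookup; returning null here simply declines to resolve it (assumed behaviour).
		return null;
	}
}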
private PartitionSettings myPartitionSettings; @Autowired(required = false) private IResourceLinkResolver myResourceLinkResolver; + @Autowired + private IRequestPartitionHelperSvc myPartitionHelperSvc; @VisibleForTesting public void setSearchParamExtractor(ISearchParamExtractor theSearchParamExtractor) { @@ -436,7 +439,7 @@ public class SearchParamExtractorService { * one we already have. */ Optional optionalResourceLink = findMatchingResourceLink(thePathAndRef, theExistingParams.getResourceLinks()); - if(optionalResourceLink.isPresent()){ + if (optionalResourceLink.isPresent()) { resourceLink = optionalResourceLink.get(); } else { resourceLink = resolveTargetAndCreateResourceLinkOrReturnNull(theRequestPartitionId, theSourceResourceName, thePathAndRef, theEntity, transactionDate, nextId, theRequest, theTransactionDetails); @@ -480,7 +483,7 @@ public class SearchParamExtractorService { boolean hasMatchingResourceVersion = myContext.getParserOptions().isStripVersionsFromReferences() || referenceElement.getVersionIdPartAsLong() == null || referenceElement.getVersionIdPartAsLong().equals(resourceLink.getTargetResourceVersion()); - if( hasMatchingSearchParamPath && hasMatchingResourceType && hasMatchingResourceId && hasMatchingResourceVersion) { + if (hasMatchingSearchParamPath && hasMatchingResourceType && hasMatchingResourceId && hasMatchingResourceVersion) { return Optional.of(resourceLink); } } @@ -567,19 +570,33 @@ public class SearchParamExtractorService { * target any more times than we have to. */ - RequestPartitionId targetRequestPartitionId = theRequestPartitionId; - if (myPartitionSettings.isPartitioningEnabled() && myPartitionSettings.getAllowReferencesAcrossPartitions() == PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED) { - targetRequestPartitionId = RequestPartitionId.allPartitions(); + IResourceLookup targetResource; + if (myPartitionSettings.isPartitioningEnabled()) { + if (myPartitionSettings.getAllowReferencesAcrossPartitions() == PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED) { + + // Interceptor: Pointcut.JPA_CROSS_PARTITION_REFERENCE_DETECTED + if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, myInterceptorBroadcaster, theRequest)) { + CrossPartitionReferenceDetails referenceDetails = new CrossPartitionReferenceDetails(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); + HookParams params = new HookParams(referenceDetails); + targetResource = (IResourceLookup) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, params); + } else { + targetResource = myResourceLinkResolver.findTargetResource(RequestPartitionId.allPartitions(), theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); + } + + } else { + targetResource = myResourceLinkResolver.findTargetResource(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); + } + } else { + targetResource = myResourceLinkResolver.findTargetResource(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); } - IResourceLookup targetResource = myResourceLinkResolver.findTargetResource(targetRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); if (targetResource == null) { return null; } String targetResourceType = targetResource.getResourceType(); - Long targetResourcePid = ((JpaPid) 
targetResource.getPersistentId()).getId(); + Long targetResourcePid = targetResource.getPersistentId().getId(); String targetResourceIdPart = theNextId.getIdPart(); Long targetVersion = theNextId.getVersionIdPartAsLong(); return ResourceLink.forLocalReference(thePathAndRef.getPath(), theEntity, targetResourceType, targetResourcePid, targetResourceIdPart, theUpdateTime, targetVersion); @@ -597,8 +614,8 @@ public class SearchParamExtractorService { for (IResourceIndexComboSearchParameter next : theParams) { if (next.getResource() == null) { next.setResource(theResourceTable); - if (next instanceof BasePartitionable){ - ((BasePartitionable)next).setPartitionId(theResourceTable.getPartitionId()); + if (next instanceof BasePartitionable) { + ((BasePartitionable) next).setPartitionId(theResourceTable.getPartitionId()); } } } @@ -671,14 +688,16 @@ public class SearchParamExtractorService { } // If extraction generated any warnings, broadcast an error - for (String next : theSearchParamSet.getWarnings()) { - StorageProcessingMessage messageHolder = new StorageProcessingMessage(); - messageHolder.setMessage(next); - HookParams params = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(StorageProcessingMessage.class, messageHolder); - CompositeInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params); + if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING, theInterceptorBroadcaster, theRequestDetails)) { + for (String next : theSearchParamSet.getWarnings()) { + StorageProcessingMessage messageHolder = new StorageProcessingMessage(); + messageHolder.setMessage(next); + HookParams params = new HookParams() + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(StorageProcessingMessage.class, messageHolder); + CompositeInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params); + } } } } diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorServiceTest.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorServiceTest.java index 36fe6d35eed..5472ff8d328 100644 --- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorServiceTest.java +++ b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorServiceTest.java @@ -36,9 +36,11 @@ public class SearchParamExtractorServiceTest { searchParamSet.addWarning("help i'm a bug"); searchParamSet.addWarning("Spiff"); + when(myJpaInterceptorBroadcaster.hasHooks(any())).thenReturn(true); when(myJpaInterceptorBroadcaster.callHooks(any(), any())).thenReturn(true); - SearchParamExtractorService.handleWarnings(new ServletRequestDetails(myRequestInterceptorBroadcaster), myJpaInterceptorBroadcaster, searchParamSet); + ServletRequestDetails requestDetails = new ServletRequestDetails(myRequestInterceptorBroadcaster); + SearchParamExtractorService.handleWarnings(requestDetails, myJpaInterceptorBroadcaster, searchParamSet); verify(myJpaInterceptorBroadcaster, times(2)).callHooks(eq(Pointcut.JPA_PERFTRACE_WARNING), any()); verify(myRequestInterceptorBroadcaster, times(2)).callHooks(eq(Pointcut.JPA_PERFTRACE_WARNING), any()); diff --git 
a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index 508e1d29417..ac1c831d20d 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java index 52bfbd4e7ec..3481c0b49f8 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java @@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.api.svc.ISearchSvc; +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.model.sched.HapiJob; import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; @@ -103,6 +104,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc private MatchUrlService myMatchUrlService; @Autowired private IResourceModifiedConsumer myResourceModifiedConsumer; + @Autowired + private HapiTransactionService myTransactionService; private int myMaxSubmitPerPass = DEFAULT_MAX_SUBMIT; private ExecutorService myExecutorService; @@ -150,8 +153,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc SubscriptionTriggeringJobDetails jobDetails = new SubscriptionTriggeringJobDetails(); jobDetails.setJobId(UUID.randomUUID().toString()); - jobDetails.setRemainingResourceIds(resourceIds.stream().map(t -> t.getValue()).collect(Collectors.toList())); - jobDetails.setRemainingSearchUrls(searchUrls.stream().map(t -> t.getValue()).collect(Collectors.toList())); + jobDetails.setRemainingResourceIds(resourceIds.stream().map(IPrimitiveType::getValue).collect(Collectors.toList())); + jobDetails.setRemainingSearchUrls(searchUrls.stream().map(IPrimitiveType::getValue).collect(Collectors.toList())); if (theSubscriptionId != null) { jobDetails.setSubscriptionId(theSubscriptionId.getIdPart()); } @@ -329,12 +332,17 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(theJobDetails.getCurrentSearchResourceType()); int maxQuerySize = myMaxSubmitPerPass - totalSubmitted; - int toIndex = fromIndex + maxQuerySize; + int toIndex; if (theJobDetails.getCurrentSearchCount() != null) { - toIndex = Math.min(toIndex, theJobDetails.getCurrentSearchCount()); + toIndex = Math.min(fromIndex + maxQuerySize, theJobDetails.getCurrentSearchCount()); + } else { + toIndex = fromIndex + maxQuerySize; } + ourLog.info("Triggering job[{}] search {} requesting resources {} - {}", theJobDetails.getJobId(), theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex); - List resourceIds = mySearchCoordinatorSvc.getResources(theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex, null); + List> resourceIds; + RequestPartitionId requestPartitionId = RequestPartitionId.allPartitions(); + resourceIds = mySearchCoordinatorSvc.getResources(theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex, 
null, requestPartitionId); ourLog.info("Triggering job[{}] delivering {} resources", theJobDetails.getJobId(), resourceIds.size()); int highestIndexSubmitted = theJobDetails.getCurrentSearchLastUploadedIndex(); diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcherTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcherTest.java index 975b78a6565..e16eca02d8a 100644 --- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcherTest.java +++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcherTest.java @@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.cache.IResourceVersionSvc; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig; import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamProvider; import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory; @@ -76,6 +77,11 @@ public class DaoSubscriptionMatcherTest { return mock(IResourceVersionSvc.class, RETURNS_DEEP_STUBS); } + @Bean + public IRequestPartitionHelperSvc requestPartitionHelperSvc() { + return mock(IRequestPartitionHelperSvc.class); + } + } } diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/BaseSubscriptionTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/BaseSubscriptionTest.java index de4f081da57..0b83e0cbe57 100644 --- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/BaseSubscriptionTest.java +++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/BaseSubscriptionTest.java @@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.subscription.module; import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.interceptor.executor.InterceptorService; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig; import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl; import ca.uhn.fhir.jpa.subscription.channel.api.IChannelFactory; @@ -25,6 +26,8 @@ import org.springframework.test.context.junit.jupiter.SpringExtension; import java.util.Collections; +import static org.mockito.Mockito.mock; + @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = { SearchParamConfig.class, @@ -84,6 +87,11 @@ public abstract class BaseSubscriptionTest { return new InterceptorService(); } + @Bean + public IRequestPartitionHelperSvc requestPartitionHelperSvc() { + return mock(IRequestPartitionHelperSvc.class); + } + @Bean // Default implementation returns the name unchanged public IChannelNamer channelNamer() { diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java index 73e839509c2..fecdfdceb98 100644 --- 
a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java +++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java @@ -4,6 +4,7 @@ import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionCriteriaParser; import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionMatchingSubscriber; @@ -20,14 +21,13 @@ import com.google.common.collect.Lists; import org.hl7.fhir.dstu3.model.BooleanType; import org.hl7.fhir.dstu3.model.Observation; import org.hl7.fhir.dstu3.model.Subscription; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import org.mockito.Answers; import org.mockito.Mock; import org.mockito.Mockito; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.test.util.ReflectionTestUtils; import java.util.Collections; @@ -46,7 +46,6 @@ import static org.mockito.Mockito.when; * Tests copied from jpa.subscription.resthook.RestHookTestDstu3Test */ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscribableChannelDstu3Test { - private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionMatchingSubscriberTest.class); private final IFhirResourceDao myMockSubscriptionDao = Mockito.mock(IFhirResourceDao.class); @BeforeEach @@ -55,6 +54,11 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri myDaoRegistry.register(myMockSubscriptionDao); } + @AfterEach + public void afterEach() { + myStorageSettings.setCrossPartitionSubscriptionEnabled(new JpaStorageSettings().isCrossPartitionSubscriptionEnabled()); + } + @Test public void testRestHookSubscriptionApplicationFhirJson() throws Exception { String payload = "application/fhir+json"; @@ -392,6 +396,13 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri ourObservationListener.awaitExpected(); } + private void mockSubscriptionRead(RequestPartitionId theRequestPartitionId, Subscription subscription) { + Subscription modifiedSubscription = subscription.copy(); + // the original partition info was the request info, but we need the actual storage partition. 
+ modifiedSubscription.setUserData(Constants.RESOURCE_PARTITION_ID, theRequestPartitionId); + when(myMockSubscriptionDao.read(eq(subscription.getIdElement()), any())).thenReturn(modifiedSubscription); + } + @Nested public class TestDeleteMessages { private final SubscriptionMatchingSubscriber subscriber = new SubscriptionMatchingSubscriber(); @@ -460,7 +471,7 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri eq(Pointcut.SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED), any(HookParams.class))).thenReturn(true); when(message.getPayloadId(null)).thenReturn(new IdDt("Patient", 123L)); when(myNonDeleteCanonicalSubscription.getSendDeleteMessages()).thenReturn(false); - when(mySubscriptionRegistry.getAll()).thenReturn(List.of(myNonDeleteSubscription ,myActiveSubscription)); + when(mySubscriptionRegistry.getAll()).thenReturn(List.of(myNonDeleteSubscription, myActiveSubscription)); when(myActiveSubscription.getSubscription()).thenReturn(myCanonicalSubscription); when(myActiveSubscription.getCriteria()).thenReturn(mySubscriptionCriteria); when(myActiveSubscription.getId()).thenReturn("Patient/123"); @@ -496,12 +507,4 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri verify(message, atLeastOnce()).getPayloadId(null); } } - - - private void mockSubscriptionRead(RequestPartitionId theRequestPartitionId, Subscription subscription) { - Subscription modifiedSubscription = subscription.copy(); - // the original partition info was the request info, but we need the actual storage partition. - modifiedSubscription.setUserData(Constants.RESOURCE_PARTITION_ID, theRequestPartitionId); - when(myMockSubscriptionDao.read(eq(subscription.getIdElement()), any())).thenReturn(modifiedSubscription); - } } diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml index bb6c4785982..1451c2ddf25 100644 --- a/hapi-fhir-jpaserver-test-dstu2/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2ValidateTest.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2ValidateTest.java index 3d64fcad5d1..305b3a8af9d 100644 --- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2ValidateTest.java +++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2ValidateTest.java @@ -34,8 +34,10 @@ import static org.junit.jupiter.api.Assertions.fail; public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2ValidateTest.class); + @Override @BeforeEach - public void before() { + public void before() throws Exception { + super.before(); myStorageSettings.setAllowExternalReferences(true); } diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java index af9704df95b..0a73a7ae1a8 100644 --- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java +++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java @@ -2792,7 +2792,7 @@ public class 
ResourceProviderDstu2Test extends BaseResourceProviderDstu2Test { * Upload structurredef */ - String contents = IOUtils.toString(getClass().getResourceAsStream("/allergyintolerance-sd-david.json"), "UTF-8"); + String contents = ClasspathUtil.loadResource("/allergyintolerance-sd-david.json"); HttpEntityEnclosingRequestBase post = new HttpPut(myServerBase + "/StructureDefinition/ohAllergyIntolerance"); post.setEntity(new StringEntity(contents, ContentType.create(Constants.CT_FHIR_JSON, "UTF-8"))); CloseableHttpResponse response = ourHttpClient.execute(post); @@ -2809,7 +2809,7 @@ public class ResourceProviderDstu2Test extends BaseResourceProviderDstu2Test { * Validate */ - contents = IOUtils.toString(getClass().getResourceAsStream("/allergyintolerance-david.json"), "UTF-8"); + contents = ClasspathUtil.loadResource("/allergyintolerance-david.json"); post = new HttpPost(myServerBase + "/AllergyIntolerance/$validate?_pretty=true"); post.setEntity(new StringEntity(contents, ContentType.create(Constants.CT_FHIR_JSON, "UTF-8"))); diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java index ab602016790..416dc6cf5b1 100644 --- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java +++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java @@ -80,11 +80,12 @@ import static org.mockito.Mockito.when; @SuppressWarnings({"unchecked"}) @ExtendWith(MockitoExtension.class) -public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ +public class SearchCoordinatorSvcImplTest extends BaseSearchSvc { private static final Logger ourLog = LoggerFactory.getLogger(SearchCoordinatorSvcImplTest.class); - - @Mock private SearchStrategyFactory mySearchStrategyFactory; - + @Spy + protected FhirContext myContext = FhirContext.forDstu2Cached(); + @Mock + private SearchStrategyFactory mySearchStrategyFactory; @Mock private ISearchCacheSvc mySearchCacheSvc; @Mock @@ -100,9 +101,6 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ private IRequestPartitionHelperSvc myPartitionHelperSvc; @Mock private ISynchronousSearchSvc mySynchronousSearchSvc; - @Spy - protected FhirContext myContext = FhirContext.forDstu2Cached(); - @Spy private ExceptionService myExceptionSvc = new ExceptionService(myContext); @@ -118,6 +116,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ @BeforeEach public void before() { HapiSystemProperties.enableUnitTestCaptureStack(); + HapiSystemProperties.enableUnitTestMode(); myCurrentSearch = null; @@ -178,7 +177,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ assertEquals(allResults.size(), oldResults.size()); allResults.addAll(newResults); return null; - }).when(mySearchResultCacheSvc).storeResults(any(), anyList(), anyList()); + }).when(mySearchResultCacheSvc).storeResults(any(), anyList(), anyList(), any(), any()); SearchParameterMap params = new SearchParameterMap(); params.add("name", new StringParam("ANAME")); @@ -233,7 +232,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ }); try { - mySvc.getResources("1234-5678", 0, 100, null); + mySvc.getResources("1234-5678", 0, 100, null, null); fail(); } catch (ResourceGoneException e) { assertEquals("Search ID \"1234-5678\" does not exist and may have expired", e.getMessage()); @@ -255,7 +254,7 @@ public class 
SearchCoordinatorSvcImplTest extends BaseSearchSvc{ }); try { - mySvc.getResources("1234-5678", 0, 100, null); + mySvc.getResources("1234-5678", 0, 100, null, null); fail(); } catch (InternalErrorException e) { assertThat(e.getMessage(), containsString("Request timed out")); @@ -295,15 +294,16 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ } private void initAsyncSearches() { - when(myPersistedJpaBundleProviderFactory.newInstanceFirstPage(nullable(RequestDetails.class), nullable(Search.class), nullable(SearchTask.class), nullable(ISearchBuilder.class))).thenAnswer(t->{ + when(myPersistedJpaBundleProviderFactory.newInstanceFirstPage(nullable(RequestDetails.class), nullable(Search.class), nullable(SearchTask.class), nullable(ISearchBuilder.class), nullable(RequestPartitionId.class))).thenAnswer(t -> { RequestDetails requestDetails = t.getArgument(0, RequestDetails.class); Search search = t.getArgument(1, Search.class); SearchTask searchTask = t.getArgument(2, SearchTask.class); ISearchBuilder searchBuilder = t.getArgument(3, ISearchBuilder.class); - PersistedJpaSearchFirstPageBundleProvider retVal = new PersistedJpaSearchFirstPageBundleProvider(search, searchTask, searchBuilder, requestDetails); + PersistedJpaSearchFirstPageBundleProvider retVal = new PersistedJpaSearchFirstPageBundleProvider(search, searchTask, searchBuilder, requestDetails, null); retVal.setStorageSettingsForUnitTest(new JpaStorageSettings()); retVal.setTxServiceForUnitTest(myTransactionService); retVal.setSearchCoordinatorSvcForUnitTest(mySvc); + retVal.setRequestPartitionHelperSvcForUnitTest(myPartitionHelperSvc); retVal.setContext(myContext); return retVal; }); @@ -333,7 +333,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ try { assertNotNull(searchId); ourLog.info("About to pull the first resource"); - List resources = mySvc.getResources(searchId, 0, 1, null); + List resources = mySvc.getResources(searchId, 0, 1, null, null); ourLog.info("Done pulling the first resource"); assertEquals(1, resources.size()); } finally { @@ -342,14 +342,14 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ }; new Thread(taskStarter).start(); - await().until(()->iter.getCountReturned() >= 3); + await().until(() -> iter.getCountReturned() >= 3); ourLog.info("About to cancel all searches"); mySvc.cancelAllActiveSearches(); ourLog.info("Done cancelling all searches"); try { - mySvc.getResources(searchId, 0, 1, null); + mySvc.getResources(searchId, 0, 1, null, null); } catch (ResourceGoneException e) { // good } @@ -373,9 +373,9 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ List pids = createPidSequence(800); IResultIterator iter = new SlowIterator(pids.iterator(), 2); when(mySearchBuilder.createQuery(same(params), any(), any(), nullable(RequestPartitionId.class))).thenReturn(iter); - when(mySearchCacheSvc.save(any())).thenAnswer(t ->{ + when(mySearchCacheSvc.save(any())).thenAnswer(t -> { ourLog.info("Saving search"); - return t.getArgument( 0, Search.class); + return t.getArgument(0, Search.class); }); doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any()); @@ -448,6 +448,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ search.setSearchType(SearchTypeEnum.SEARCH); search.setResourceType("Patient"); search.setStatus(SearchStatusEnum.LOADING); + search.setSearchParameterMap(new SearchParameterMap()); 
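/*
 * Illustrative sketch only (not part of this patch): the extra trailing "null" arguments in this
 * test's getResources(...) calls reflect the partition-aware signature introduced by this change
 * set. The SubscriptionTriggeringSvcImpl change above shows the intended production shape, where
 * the new last argument is the RequestPartitionId to search in. Variable names below are
 * assumptions; the list's element type is left to inference because the generic parameters were
 * stripped from this copy of the patch.
 */
RequestPartitionId examplePartitionId = RequestPartitionId.allPartitions();
var exampleResources = mySearchCoordinatorSvc.getResources(exampleSearchUuid, 0, 100, null, examplePartitionId);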
when(mySearchCacheSvc.fetchByUuid(eq(uuid))).thenReturn(Optional.of(search)); doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any()); @@ -462,7 +463,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ // ignore } - when(mySearchResultCacheSvc.fetchResultPids(any(Search.class), anyInt(), anyInt())).thenAnswer(theInvocation -> { + when(mySearchResultCacheSvc.fetchResultPids(any(Search.class), anyInt(), anyInt(), any(), any())).thenAnswer(theInvocation -> { ArrayList results = new ArrayList<>(); for (long i = theInvocation.getArgument(1, Integer.class); i < theInvocation.getArgument(2, Integer.class); i++) { Long nextPid = i + 10L; @@ -492,6 +493,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ provider.setSearchBuilderFactoryForUnitTest(mySearchBuilderFactory); provider.setSearchCoordinatorSvcForUnitTest(mySvc); provider.setStorageSettingsForUnitTest(new JpaStorageSettings()); + provider.setRequestPartitionId(RequestPartitionId.defaultPartition()); resources = provider.getResources(20, 40); assertEquals(20, resources.size()); assertEquals("30", resources.get(0).getIdElement().getValueAsString()); @@ -511,6 +513,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ provider.setDaoRegistryForUnitTest(myDaoRegistry); provider.setSearchCoordinatorSvcForUnitTest(mySvc); provider.setStorageSettingsForUnitTest(new JpaStorageSettings()); + provider.setRequestPartitionId(RequestPartitionId.defaultPartition()); return provider; } @@ -566,12 +569,12 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ search.setTotalCount(100); when(mySearchCacheSvc.fetchByUuid(eq("0000-1111"))).thenReturn(Optional.of(search)); - when(mySearchResultCacheSvc.fetchResultPids(any(), anyInt(), anyInt())).thenReturn(null); + when(mySearchResultCacheSvc.fetchResultPids(any(), anyInt(), anyInt(), any(), any())).thenReturn(null); try { - mySvc.getResources("0000-1111", 0, 10, null); + mySvc.getResources("0000-1111", 0, 10, null, null); fail(); - } catch (ResourceGoneException e) { + } catch (ResourceGoneException e) { assertEquals("Search ID \"0000-1111\" does not exist and may have expired", e.getMessage()); } @@ -593,27 +596,70 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ search.setSearchParameterMap(new SearchParameterMap()); when(mySearchCacheSvc.fetchByUuid(eq("0000-1111"))).thenReturn(Optional.of(search)); - when(mySearchCacheSvc.tryToMarkSearchAsInProgress(any())).thenAnswer(t->{ + when(mySearchCacheSvc.tryToMarkSearchAsInProgress(any())).thenAnswer(t -> { search.setStatus(SearchStatusEnum.LOADING); return Optional.of(search); }); mockSearchTask(); - when(mySearchResultCacheSvc.fetchAllResultPids(any())).thenReturn(null); + when(mySearchResultCacheSvc.fetchAllResultPids(any(), any(), any())).thenReturn(null); try { - mySvc.getResources("0000-1111", 0, 10, null); + mySvc.getResources("0000-1111", 0, 10, null, null); fail(); - } catch (ResourceGoneException e) { + } catch (ResourceGoneException e) { assertEquals("Search ID \"0000-1111\" does not exist and may have expired", e.getMessage()); } } + private void mockSearchTask() { + IPagingProvider pagingProvider = mock(IPagingProvider.class); + lenient().when(pagingProvider.getMaximumPageSize()) + .thenReturn(500); + when(myBeanFactory.getBean(anyString(), any(SearchTaskParameters.class))) + .thenAnswer(invocation -> { + String type = invocation.getArgument(0); + switch (type) { + case 
SearchConfig.SEARCH_TASK -> { + return new SearchTask( + invocation.getArgument(1), + myTransactionService, + ourCtx, + myInterceptorBroadcaster, + mySearchBuilderFactory, + mySearchResultCacheSvc, + myStorageSettings, + mySearchCacheSvc, + pagingProvider + ); + } + case SearchConfig.CONTINUE_TASK -> { + return new SearchContinuationTask( + invocation.getArgument(1), + myTransactionService, + ourCtx, + myInterceptorBroadcaster, + mySearchBuilderFactory, + mySearchResultCacheSvc, + myStorageSettings, + mySearchCacheSvc, + pagingProvider, + myExceptionSvc + ); + } + default -> { + fail("Invalid bean type: " + type); + return null; + } + } + }); + } + public static class FailAfterNIterator extends BaseIterator implements IResultIterator { - private int myCount; private final IResultIterator myWrap; + private int myCount; FailAfterNIterator(IResultIterator theWrap, int theCount) { myWrap = theWrap; @@ -738,45 +784,4 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{ // nothing } } - - - private void mockSearchTask() { - IPagingProvider pagingProvider = mock(IPagingProvider.class); - lenient().when(pagingProvider.getMaximumPageSize()) - .thenReturn(500); - when(myBeanFactory.getBean(anyString(), any(SearchTaskParameters.class))) - .thenAnswer(invocation -> { - String type = invocation.getArgument(0); - switch (type) { - case SearchConfig.SEARCH_TASK: - return new SearchTask( - invocation.getArgument(1), - myTransactionService, - ourCtx, - myInterceptorBroadcaster, - mySearchBuilderFactory, - mySearchResultCacheSvc, - myStorageSettings, - mySearchCacheSvc, - pagingProvider - ); - case SearchConfig.CONTINUE_TASK: - return new SearchContinuationTask( - invocation.getArgument(1), - myTransactionService, - ourCtx, - myInterceptorBroadcaster, - mySearchBuilderFactory, - mySearchResultCacheSvc, - myStorageSettings, - mySearchCacheSvc, - pagingProvider, - myExceptionSvc - ); - default: - fail("Invalid bean type: " + type); - return null; - } - }); - } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/logback-test.xml b/hapi-fhir-jpaserver-test-dstu2/src/test/resources/logback-test.xml similarity index 100% rename from hapi-fhir-jpaserver-test-utilities/src/main/resources/logback-test.xml rename to hapi-fhir-jpaserver-test-dstu2/src/test/resources/logback-test.xml diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml index 62099e0b239..f2e29bf9cd3 100644 --- a/hapi-fhir-jpaserver-test-dstu3/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java index 71f568ec1d2..09e81fa0351 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java @@ -2553,17 +2553,16 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test { p.addName().setFamily("testMetaAddInvalid"); IIdType id = myClient.create().resource(p).execute().getId().toUnqualifiedVersionless(); - //@formatter:off - String input = "\n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - ""; - //@formatter:on + String input = """ 
+ + + + + + + + + """; HttpPost post = new HttpPost(myServerBase + "/Patient/" + id.getIdPart() + "/$meta-add"); post.setEntity(new StringEntity(input, ContentType.create(Constants.CT_FHIR_XML, "UTF-8"))); diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java index d5e38616652..888cd3b2497 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java @@ -103,7 +103,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv myExtensionalCsId_v1 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless(); } }); - myCodeSystemDao.readEntity(myExtensionalCsId_v1, null).getId(); + myCodeSystemDao.readEntity(myExtensionalCsId_v1, null); theCodeSystem.setId("CodeSystem/cs2"); theCodeSystem.setVersion("2"); @@ -116,7 +116,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv myExtensionalCsId_v2 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless(); } }); - myCodeSystemDao.readEntity(myExtensionalCsId_v2, null).getId(); + myCodeSystemDao.readEntity(myExtensionalCsId_v2, null); } @@ -126,13 +126,13 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv valueSet.setId("ValueSet/vs1"); valueSet.getCompose().getInclude().get(0).setVersion("1"); myExtensionalVsId_v1 = persistSingleValueSet(valueSet, HttpVerb.POST); - myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getId(); + myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getIdDt().getIdPartAsLong(); valueSet.setVersion("2"); valueSet.setId("ValueSet/vs2"); valueSet.getCompose().getInclude().get(0).setVersion("2"); myExtensionalVsId_v2 = persistSingleValueSet(valueSet, HttpVerb.POST); - myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getId(); + myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getIdDt().getIdPartAsLong(); } diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-test-dstu3/src/test/resources/logback-test.xml new file mode 100644 index 00000000000..dcada7fec76 --- /dev/null +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/resources/logback-test.xml @@ -0,0 +1,46 @@ + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml index cb74e3b903a..fac4daeeba7 100644 --- a/hapi-fhir-jpaserver-test-r4/pom.xml +++ b/hapi-fhir-jpaserver-test-r4/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java index 1e7d48945d6..8b40f551501 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java +++ 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java @@ -88,10 +88,14 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test { return RunOutcome.SUCCESS; } + @Override @BeforeEach - public void before() { + public void before() throws Exception { + super.before(); + myCompletionHandler = details -> {}; myWorkChannel = (LinkedBlockingChannel) myChannelFactory.getOrCreateReceiver(CHANNEL_NAME, JobWorkNotificationJsonMessage.class, new ChannelConsumerSettings()); + myStorageSettings.setJobFastTrackingEnabled(true); } @AfterEach diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java index 8dda2c2020c..580981059a9 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -17,9 +17,9 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.client.apache.ResourceEntity; import ca.uhn.fhir.rest.server.HardcodedServerAddressStrategy; -import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; -import ca.uhn.fhir.test.utilities.JettyUtil; +import ca.uhn.fhir.test.utilities.HttpClientExtension; +import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; import ca.uhn.fhir.util.JsonUtil; import ca.uhn.fhir.util.UrlUtil; import com.google.common.base.Charsets; @@ -28,20 +28,14 @@ import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.servlet.ServletHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.InstantType; import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.StringType; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -54,12 +48,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import static org.hamcrest.MatcherAssert.assertThat; @@ -89,53 +83,19 @@ public class BulkDataExportProviderTest { private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportProviderTest.class); private static final String GROUP_ID = "Group/G2401"; private static final String G_JOB_ID = "0000000-GGGGGG"; - private Server myServer; @Spy private final FhirContext myCtx = FhirContext.forR4Cached(); - private int myPort; - + @RegisterExtension + private final 
HttpClientExtension myClient = new HttpClientExtension(); @Mock private IBatch2JobRunner myJobRunner; - - private JpaStorageSettings myStorageSettings; - private DaoRegistry myDaoRegistry; - private CloseableHttpClient myClient; - @InjectMocks private BulkDataExportProvider myProvider; - private RestfulServer servlet; - - static Stream paramsProvider(){ - return Stream.of( - Arguments.arguments(true), - Arguments.arguments(false) - ); - } - - @AfterEach - public void after() throws Exception { - JettyUtil.closeServer(myServer); - myClient.close(); - } - - @BeforeEach - public void start() throws Exception { - myServer = new Server(0); - - ServletHandler proxyHandler = new ServletHandler(); - servlet = new RestfulServer(myCtx); - servlet.registerProvider(myProvider); - ServletHolder servletHolder = new ServletHolder(servlet); - proxyHandler.addServletWithMapping(servletHolder, "/*"); - myServer.setHandler(proxyHandler); - JettyUtil.startServer(myServer); - myPort = JettyUtil.getPortForStartedServer(myServer); - - PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); - HttpClientBuilder builder = HttpClientBuilder.create(); - builder.setConnectionManager(connectionManager); - myClient = builder.build(); - } + @RegisterExtension + private final RestfulServerExtension myServer = new RestfulServerExtension(myCtx) + .withServer(s -> s.registerProvider(myProvider)); + private JpaStorageSettings myStorageSettings; + private DaoRegistry myDaoRegistry; @BeforeEach public void injectStorageSettings() { @@ -147,9 +107,9 @@ public class BulkDataExportProviderTest { } public void startWithFixedBaseUrl() { - String baseUrl = "http://localhost:" + myPort + "/fixedvalue"; + String baseUrl = myServer.getBaseUrl() + "/fixedvalue"; HardcodedServerAddressStrategy hardcodedServerAddressStrategy = new HardcodedServerAddressStrategy(baseUrl); - servlet.setServerAddressStrategy(hardcodedServerAddressStrategy); + myServer.withServer(s -> s.setServerAddressStrategy(hardcodedServerAddressStrategy)); } private BulkExportParameters verifyJobStart() { @@ -196,7 +156,7 @@ public class BulkDataExportProviderTest { ourLog.debug(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); // test - HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.setEntity(new ResourceEntity(myCtx, input)); ourLog.info("Request: {}", post); @@ -205,7 +165,7 @@ public class BulkDataExportProviderTest { assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } BulkExportParameters params = verifyJobStart(); @@ -223,7 +183,7 @@ public class BulkDataExportProviderTest { .thenReturn(createJobStartResponse()); Parameters input = new Parameters(); - HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT); 
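/*
 * Illustrative sketch only (not part of this patch): a condensed view of the JUnit 5 extension
 * wiring this test now relies on instead of the hand-rolled Jetty server it previously built. All
 * calls shown are taken from the changes above; only their grouping here is editorial. The server
 * extension manages the servlet lifecycle and exposes getBaseUrl(), and the HttpClientExtension
 * replaces the manually constructed CloseableHttpClient.
 */
@RegisterExtension
private final RestfulServerExtension myServer = new RestfulServerExtension(myCtx)
	.withServer(s -> s.registerProvider(myProvider));

@RegisterExtension
private final HttpClientExtension myClient = new HttpClientExtension();

// Requests are then built against the extension-managed base URL, for example:
//   HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT);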
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.setEntity(new ResourceEntity(myCtx, input)); @@ -243,7 +203,7 @@ public class BulkDataExportProviderTest { InstantType now = InstantType.now(); - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner") + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()) @@ -257,7 +217,7 @@ public class BulkDataExportProviderTest { assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } BulkExportParameters params = verifyJobStart(); @@ -272,7 +232,7 @@ public class BulkDataExportProviderTest { when(myJobRunner.startNewJob(any())) .thenReturn(createJobStartResponse()); - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient,EpisodeOfCare") + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?_id=P999999990") @@ -286,7 +246,7 @@ public class BulkDataExportProviderTest { assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } BulkExportParameters params = verifyJobStart(); @@ -309,7 +269,7 @@ public class BulkDataExportProviderTest { .thenReturn(info); // test - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + + String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; HttpGet get = new HttpGet(url); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); @@ -338,7 +298,7 @@ public class BulkDataExportProviderTest { .thenReturn(info); // call - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + + String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; HttpGet get = new HttpGet(url); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); @@ -382,7 +342,7 @@ public class BulkDataExportProviderTest { .thenReturn(info); // call - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" 
+ + String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; HttpGet get = new HttpGet(url); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); @@ -398,11 +358,11 @@ public class BulkDataExportProviderTest { BulkExportResponseJson responseJson = JsonUtil.deserialize(responseContent, BulkExportResponseJson.class); assertEquals(3, responseJson.getOutput().size()); assertEquals("Patient", responseJson.getOutput().get(0).getType()); - assertEquals("http://localhost:" + myPort + "/Binary/111", responseJson.getOutput().get(0).getUrl()); + assertEquals(myServer.getBaseUrl() + "/Binary/111", responseJson.getOutput().get(0).getUrl()); assertEquals("Patient", responseJson.getOutput().get(1).getType()); - assertEquals("http://localhost:" + myPort + "/Binary/222", responseJson.getOutput().get(1).getUrl()); + assertEquals(myServer.getBaseUrl() + "/Binary/222", responseJson.getOutput().get(1).getUrl()); assertEquals("Patient", responseJson.getOutput().get(2).getType()); - assertEquals("http://localhost:" + myPort + "/Binary/333", responseJson.getOutput().get(2).getUrl()); + assertEquals(myServer.getBaseUrl() + "/Binary/333", responseJson.getOutput().get(2).getUrl()); } } @@ -427,7 +387,7 @@ public class BulkDataExportProviderTest { .thenReturn(info); // test - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + + String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; HttpGet get = new HttpGet(url); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); @@ -453,7 +413,7 @@ public class BulkDataExportProviderTest { when(myJobRunner.getJobInfo(anyString())) .thenThrow(new ResourceNotFoundException("Unknown job: AAA")); - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + + String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; HttpGet get = new HttpGet(url); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); @@ -464,13 +424,13 @@ public class BulkDataExportProviderTest { assertEquals(404, response.getStatusLine().getStatusCode()); assertEquals(Constants.CT_FHIR_JSON_NEW, response.getEntity().getContentType().getValue().replaceAll(";.*", "").trim()); - assertThat(responseContent, containsString("\"diagnostics\":\"Unknown job: AAA\"")); + assertThat(responseContent, containsString("\"diagnostics\": \"Unknown job: AAA\"")); } } /** * Group export tests - * See https://build.fhir.org/ig/HL7/us-bulk-data/ + * See Bulk Data IG *

     * GET [fhir base]/Group/[id]/$export
     */
    @@ -496,7 +456,7 @@ public class BulkDataExportProviderTest { ourLog.debug(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); // call - HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.setEntity(new ResourceEntity(myCtx, input)); ourLog.info("Request: {}", post); @@ -504,7 +464,7 @@ public class BulkDataExportProviderTest { ourLog.info("Response: {}", response.toString()); assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } // verify @@ -525,7 +485,7 @@ public class BulkDataExportProviderTest { InstantType now = InstantType.now(); - String url = "http://localhost:" + myPort + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT + String url = myServer.getBaseUrl() + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner") + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()) @@ -541,7 +501,7 @@ public class BulkDataExportProviderTest { assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } BulkExportParameters bp = verifyJobStart(); @@ -559,7 +519,7 @@ public class BulkDataExportProviderTest { InstantType now = InstantType.now(); // manual construct - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT + String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT + "?" 
+ JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Immunization, Observation") + "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString()); @@ -567,44 +527,44 @@ public class BulkDataExportProviderTest { String immunizationTypeFilter1 = "Immunization?patient.identifier=SC378274-MRN|009999997,SC378274-MRN|009999998,SC378274-MRN|009999999&date=2020-01-02"; String immunizationTypeFilter2 = "Immunization?patient=Patient/123"; String observationFilter1 = "Observation?subject=Patient/123&created=ge2020-01-01"; - StringBuilder multiValuedTypeFilterBuilder = new StringBuilder() - .append("&") - .append(JpaConstants.PARAM_EXPORT_TYPE_FILTER) - .append("=") - .append(UrlUtil.escapeUrlParam(immunizationTypeFilter1)) - .append(",") - .append(UrlUtil.escapeUrlParam(immunizationTypeFilter2)) - .append(",") - .append(UrlUtil.escapeUrlParam(observationFilter1)); + String multiValuedTypeFilterBuilder = "&" + + JpaConstants.PARAM_EXPORT_TYPE_FILTER + + "=" + + UrlUtil.escapeUrlParam(immunizationTypeFilter1) + + "," + + UrlUtil.escapeUrlParam(immunizationTypeFilter2) + + "," + + UrlUtil.escapeUrlParam(observationFilter1); - url += multiValuedTypeFilterBuilder.toString(); + url += multiValuedTypeFilterBuilder; // call HttpGet get = new HttpGet(url); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); - myClient.execute(get); - - // verify - BulkExportParameters bp = verifyJobStart(); - assertThat(bp.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1)); + try (CloseableHttpResponse ignored = myClient.execute(get)) { + // verify + BulkExportParameters bp = verifyJobStart(); + assertThat(bp.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1)); + } } @Test public void testInitiateGroupExportWithInvalidResourceTypesFails() throws IOException { // when - String url = "http://localhost:" + myPort + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT + String url = myServer.getBaseUrl() + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT + "?" 
+ JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("StructureDefinition,Observation"); HttpGet get = new HttpGet(url); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); - CloseableHttpResponse execute = myClient.execute(get); - String responseBody = IOUtils.toString(execute.getEntity().getContent()); + try (CloseableHttpResponse execute = myClient.execute(get)) { + String responseBody = IOUtils.toString(execute.getEntity().getContent(), StandardCharsets.UTF_8); - // verify - assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(400))); - assertThat(responseBody, is(containsString("Resource types [StructureDefinition] are invalid for this type of export, as they do not contain search parameters that refer to patients."))); + // verify + assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(400))); + assertThat(responseBody, is(containsString("Resource types [StructureDefinition] are invalid for this type of export, as they do not contain search parameters that refer to patients."))); + } } @Test @@ -614,22 +574,23 @@ public class BulkDataExportProviderTest { .thenReturn(createJobStartResponse()); // test - String url = "http://localhost:" + myPort + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT + String url = myServer.getBaseUrl() + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON); HttpGet get = new HttpGet(url); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); - CloseableHttpResponse execute = myClient.execute(get); + try (CloseableHttpResponse execute = myClient.execute(get)) { - // verify - assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(202))); - final BulkExportParameters bulkExportParameters = verifyJobStart(); + // verify + assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(202))); + final BulkExportParameters bulkExportParameters = verifyJobStart(); - assertAll( - () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Patient")), - () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Group")), - () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Device")) - ); + assertAll( + () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Patient")), + () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Group")), + () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Device")) + ); + } } @Test @@ -645,7 +606,7 @@ public class BulkDataExportProviderTest { ourLog.debug(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); // call - HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.setEntity(new ResourceEntity(myCtx, input)); ourLog.info("Request: {}", post); @@ -654,7 +615,7 @@ public class BulkDataExportProviderTest { assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + 
"/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } // verify @@ -674,7 +635,7 @@ public class BulkDataExportProviderTest { input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); // call - HttpPost post = new HttpPost("http://localhost:" + myPort + "/Patient/" + JpaConstants.OPERATION_EXPORT); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/Patient/" + JpaConstants.OPERATION_EXPORT); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.setEntity(new ResourceEntity(myCtx, input)); ourLog.info("Request: {}", post); @@ -682,7 +643,7 @@ public class BulkDataExportProviderTest { ourLog.info("Response: {}", response.toString()); assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } BulkExportParameters bp = verifyJobStart(); @@ -707,7 +668,7 @@ public class BulkDataExportProviderTest { ourLog.debug(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); // call - HttpPost post = new HttpPost("http://localhost:" + myPort + "/Patient/" + JpaConstants.OPERATION_EXPORT); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/Patient/" + JpaConstants.OPERATION_EXPORT); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.setEntity(new ResourceEntity(myCtx, input)); ourLog.info("Request: {}", post); @@ -715,7 +676,7 @@ public class BulkDataExportProviderTest { ourLog.info("Response: {}", response.toString()); assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } BulkExportParameters bp = verifyJobStart(); @@ -740,7 +701,7 @@ public class BulkDataExportProviderTest { input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner")); // call - HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.addHeader(Constants.HEADER_CACHE_CONTROL, Constants.CACHE_CONTROL_NO_CACHE); post.setEntity(new ResourceEntity(myCtx, input)); @@ -749,7 +710,7 @@ public class BulkDataExportProviderTest { ourLog.info("Response: {}", response.toString()); assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } // verify @@ 
-757,7 +718,6 @@ public class BulkDataExportProviderTest { assertThat(parameters.isUseExistingJobsFirst(), is(equalTo(false))); } - @Test public void testProvider_whenEnableBatchJobReuseIsFalse_startsNewJob() throws IOException { // setup @@ -775,7 +735,7 @@ public class BulkDataExportProviderTest { input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner")); // call - HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.setEntity(new ResourceEntity(myCtx, input)); ourLog.info("Request: {}", post); @@ -783,7 +743,7 @@ public class BulkDataExportProviderTest { ourLog.info("Response: {}", response.toString()); assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } // verify @@ -791,7 +751,6 @@ public class BulkDataExportProviderTest { assertThat(parameters.isUseExistingJobsFirst(), is(equalTo(false))); } - @Test public void testProviderReturnsSameIdForSameJob() throws IOException { // given @@ -831,7 +790,7 @@ public class BulkDataExportProviderTest { .thenReturn(result); // call - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + + String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; HttpDelete delete = new HttpDelete(url); try (CloseableHttpResponse response = myClient.execute(delete)) { @@ -860,7 +819,7 @@ public class BulkDataExportProviderTest { .thenReturn(info); // call - String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + + String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" 
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; HttpDelete delete = new HttpDelete(url); try (CloseableHttpResponse response = myClient.execute(delete)) { @@ -884,7 +843,7 @@ public class BulkDataExportProviderTest { .thenReturn(createJobStartResponse()); // call - final HttpGet httpGet = new HttpGet(String.format("http://localhost:%s/%s", myPort, JpaConstants.OPERATION_EXPORT)); + final HttpGet httpGet = new HttpGet(String.format("http://localhost:%s/%s", myServer.getPort(), JpaConstants.OPERATION_EXPORT)); httpGet.addHeader("_outputFormat", Constants.CT_FHIR_NDJSON); httpGet.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); @@ -892,7 +851,7 @@ public class BulkDataExportProviderTest { ourLog.info("Response: {}", response.toString()); assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals(String.format("http://localhost:%s/$export-poll-status?_jobId=%s", myPort, A_JOB_ID), response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(String.format("http://localhost:%s/$export-poll-status?_jobId=%s", myServer.getPort(), A_JOB_ID), response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); assertTrue(IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8).isEmpty()); } @@ -907,14 +866,14 @@ public class BulkDataExportProviderTest { .thenReturn(createJobStartResponse()); // call - final HttpGet httpGet = new HttpGet(String.format("http://localhost:%s/%s?_outputFormat=%s", myPort, JpaConstants.OPERATION_EXPORT, Constants.CT_FHIR_NDJSON)); + final HttpGet httpGet = new HttpGet(String.format("http://localhost:%s/%s?_outputFormat=%s", myServer.getPort(), JpaConstants.OPERATION_EXPORT, Constants.CT_FHIR_NDJSON)); httpGet.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); try (CloseableHttpResponse response = myClient.execute(httpGet)) { assertAll( () -> assertEquals(202, response.getStatusLine().getStatusCode()), () -> assertEquals("Accepted", response.getStatusLine().getReasonPhrase()), - () -> assertEquals(String.format("http://localhost:%s/$export-poll-status?_jobId=%s", myPort, A_JOB_ID), response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()), + () -> assertEquals(String.format("http://localhost:%s/$export-poll-status?_jobId=%s", myServer.getPort(), A_JOB_ID), response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()), () -> assertTrue(IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8).isEmpty()) ); } @@ -933,7 +892,7 @@ public class BulkDataExportProviderTest { input.addParameter(JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, new StringType(jobId)); // Initiate Export Poll Status - HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.setEntity(new ResourceEntity(myCtx, input)); @@ -962,7 +921,7 @@ public class BulkDataExportProviderTest { input.addParameter(JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, new StringType(A_JOB_ID)); // Initiate Export Poll Status - HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS); 
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.setEntity(new ResourceEntity(myCtx, input)); @@ -980,7 +939,7 @@ public class BulkDataExportProviderTest { input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(ca.uhn.fhir.rest.api.Constants.CT_FHIR_NDJSON)); // Initiate Export Poll Status - HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS); + HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.setEntity(new ResourceEntity(myCtx, input)); @@ -992,7 +951,7 @@ public class BulkDataExportProviderTest { private void callExportAndAssertJobId(Parameters input, String theExpectedJobId) throws IOException { HttpPost post; - post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); + post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.addHeader(Constants.HEADER_CACHE_CONTROL, Constants.CACHE_CONTROL_NO_CACHE); post.setEntity(new ResourceEntity(myCtx, input)); @@ -1001,9 +960,16 @@ public class BulkDataExportProviderTest { ourLog.info("Response: {}", response.toString()); assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); - assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + theExpectedJobId, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + theExpectedJobId, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } } + static Stream paramsProvider() { + return Stream.of( + Arguments.arguments(true), + Arguments.arguments(false) + ); + } + } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java index 3d68e7f458d..b54610ea2ad 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java @@ -212,8 +212,10 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest { throw new Exception(); } catch (Exception e) { stack = StackTraceHelper.getStackAsString(e); - int lastWantedNewLine = StringUtils.ordinalIndexOf(stack, "\n", 15); - stack = stack.substring(0, lastWantedNewLine); + int lastWantedNewLine = StringUtils.ordinalIndexOf(stack, "\n", 25); + if (lastWantedNewLine != -1) { + stack = stack.substring(0, lastWantedNewLine); + } } RequestPartitionId retVal = myReadRequestPartitionIds.remove(0); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ExternalReferenceTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ExternalReferenceTest.java index e37ccc68aa3..1393eb1a0de 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ExternalReferenceTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ExternalReferenceTest.java @@ -36,7 +36,7 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test { 
myStorageSettings.setAllowExternalReferences(new JpaStorageSettings().isAllowExternalReferences()); myStorageSettings.setTreatBaseUrlsAsLocal(null); } - + @Test public void testInternalReferenceBlockedByDefault() { Patient p = new Patient(); @@ -55,7 +55,7 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test { org.setId("FOO"); org.setName("Org Name"); myOrganizationDao.update(org, mySrd); - + Patient p = new Patient(); p.getManagingOrganization().setReference("http://example.com/base/Organization/FOO"); try { @@ -78,7 +78,7 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test { Patient p = new Patient(); p.getManagingOrganization().setReference("http://example.com/base/Organization/FOO"); IIdType pid = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless(); - + SearchParameterMap map = new SearchParameterMap(); map.add(Patient.SP_ORGANIZATION, new ReferenceParam("http://example.com/base/Organization/FOO")); assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), contains(pid.getValue())); @@ -103,12 +103,12 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test { Patient p = new Patient(); p.getManagingOrganization().setReference("http://example.com/base/Organization/FOO"); IIdType pid = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless(); - + p = myPatientDao.read(pid, mySrd); assertEquals("Organization/FOO", p.getManagingOrganization().getReference()); - + SearchParameterMap map; - + map = new SearchParameterMap(); map.add(Patient.SP_ORGANIZATION, new ReferenceParam("http://example.com/base/Organization/FOO")); assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), contains(pid.getValue())); @@ -117,11 +117,11 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test { @Test public void testSearchForInvalidLocalReference() { SearchParameterMap map; - + map = new SearchParameterMap(); map.add(Patient.SP_ORGANIZATION, new ReferenceParam("Organization/FOO")); assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), empty()); - + map = new SearchParameterMap(); map.add(Patient.SP_ORGANIZATION, new ReferenceParam("Organization/9999999999")); assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), empty()); @@ -142,10 +142,10 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test { Patient p = new Patient(); p.getManagingOrganization().setReference("http://example.com/base/Organization/FOO"); IIdType pid = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless(); - + p = myPatientDao.read(pid, mySrd); assertEquals("Organization/FOO", p.getManagingOrganization().getReference()); - + SearchParameterMap map; // Different base diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java index 5ebe63379f4..6cc69a71556 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java @@ -2531,16 +2531,27 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test { } + @Test + public void testFetchMultiplePages() { + create9Patients(); + + myStorageSettings.setSearchPreFetchThresholds(Lists.newArrayList(3, 6, 10)); + + IBundleProvider search = myPatientDao.search(new 
SearchParameterMap(), mySrd); + List resources = search.getResources(0, 3); + assertEquals(3, resources.size()); + + IBundleProvider search2 = myPagingProvider.retrieveResultList(mySrd, search.getUuid()); + resources = search2.getResources(3, 99); + assertEquals(6, resources.size()); + } + /** * See #1174 */ @Test public void testSearchDateInSavedSearch() { - for (int i = 1; i <= 9; i++) { - Patient p1 = new Patient(); - p1.getBirthDateElement().setValueAsString("1980-01-0" + i); - myPatientDao.create(p1).getId().toUnqualifiedVersionless().getValue(); - } + create9Patients(); myStorageSettings.setSearchPreFetchThresholds(Lists.newArrayList(3, 6, 10)); @@ -2578,6 +2589,14 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test { } } + private void create9Patients() { + for (int i = 1; i <= 9; i++) { + Patient p1 = new Patient(); + p1.getBirthDateElement().setValueAsString("1980-01-0" + i); + myPatientDao.create(p1).getId().toUnqualifiedVersionless().getValue(); + } + } + /** * #222 */ diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java index ac107678214..048e242d700 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java @@ -71,8 +71,10 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test { TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(false); } + @Override @BeforeEach - public void before() { + public void before() throws Exception { + super.before(); myStorageSettings.setMaximumExpansionSize(5000); myCachingValidationSupport.invalidateCaches(); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java index 2056bc76f0d..6b10d43f2a1 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java @@ -1663,6 +1663,7 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { String rawResource = myFhirContext.newJsonParser().encodeResourceToString(params); myPatientDao.validate(pat, null, rawResource, EncodingEnum.JSON, ValidationModeEnum.UPDATE, null, mySrd); } + @Test public void testValidateRawResourceForUpdateWithNoId() { String methodName = "testValidateForUpdate"; diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java index 7a3d8cc05e1..0787c9c250b 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java @@ -563,7 +563,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { } @Test - public void testReindexing() throws InterruptedException { + public void testReindexing() { Patient p = new Patient(); p.addName().setFamily("family"); final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualified(); diff --git 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java index f4b32ac4d51..922ce524224 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java @@ -769,6 +769,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { // Update that resource addReadPartition(myPartitionId); + addCreatePartition(myPartitionId); p = new Patient(); p.setActive(true); p.addIdentifier().setValue("12345"); @@ -2751,7 +2752,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { ourLog.info("About to start transaction"); - for (int i = 0; i < 28; i++) { + for (int i = 0; i < 40; i++) { addCreatePartition(1, null); } @@ -2863,13 +2864,12 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { // Fetch history resource searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true); ourLog.info("SQL:{}", searchSql); - assertEquals(0, countMatches(searchSql, "PARTITION_ID"), searchSql); + assertEquals(1, countMatches(searchSql, "PARTITION_ID in"), searchSql); // Fetch history resource searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(true, true); ourLog.info("SQL:{}", searchSql); - assertEquals(0, countMatches(searchSql, "PARTITION_ID="), searchSql.replace(" ", "").toUpperCase()); - assertEquals(0, countMatches(searchSql, "PARTITION_IDIN"), searchSql.replace(" ", "").toUpperCase()); + assertEquals(1, countMatches(searchSql, "PARTITION_ID in"), searchSql.replace(" ", "").toUpperCase()); } @Test diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PartitioningInterceptorR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PartitioningInterceptorR4Test.java index a7ba89e62f7..e6abe1c9b89 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PartitioningInterceptorR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PartitioningInterceptorR4Test.java @@ -39,7 +39,6 @@ import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.mock.web.MockHttpServletRequest; -import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import java.time.LocalDate; import java.util.ArrayList; @@ -79,8 +78,11 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest { myInterceptorRegistry.unregisterAllInterceptors(); } + @Override @BeforeEach - public void before() throws ServletException { + public void before() throws Exception { + super.before(); + myPartitionSettings.setPartitioningEnabled(true); myPartitionInterceptor = new MyWriteInterceptor(); @@ -152,7 +154,7 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest { sd.setUrl("http://foo"); myStructureDefinitionDao.create(sd, new ServletRequestDetails()); - runInTransaction(()->{ + runInTransaction(() -> { List resources = myResourceTableDao.findAll(); LocalDate expectedDate = LocalDate.of(2021, 2, 22); assertEquals(1, resources.size()); @@ -169,7 +171,7 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest { sd.setUrl("http://foo"); myStructureDefinitionDao.create(sd, new ServletRequestDetails()); - runInTransaction(()->{ + 
runInTransaction(() -> { List resources = myResourceTableDao.findAll(); assertEquals(1, resources.size()); assertEquals(null, resources.get(0).getPartitionId()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java index 2d4370ee64e..bbfcd43f45a 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java @@ -61,7 +61,7 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest { @BeforeEach public void before() throws Exception { super.before(); - mySvc = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor); + mySvc = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor, myPartitionSettings); myForceOffsetSearchModeInterceptor = new ForceOffsetSearchModeInterceptor(); myInterceptorRegistry.registerInterceptor(mySvc); @@ -342,8 +342,8 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest { myCaptureQueriesListener.clear(); IBundleProvider outcome = myOrganizationDao.history(new IdType("Organization/C"), null, null, null, mySrd); - assertEquals(2, outcome.size()); myCaptureQueriesListener.logSelectQueries(); + assertEquals(2, outcome.size()); assertEquals(3, myCaptureQueriesListener.getSelectQueries().size()); assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false), containsString("PARTITION_ID in ")); assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false), containsString("PARTITION_ID=")); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java index 5effc5f8b66..583fb6ea597 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java @@ -79,7 +79,7 @@ public class JpaPackageCacheTest extends BaseJpaR4Test { public void testSaveAndDeletePackagePartitionsEnabled() throws IOException { myPartitionSettings.setPartitioningEnabled(true); myPartitionSettings.setDefaultPartitionId(1); - PatientIdPartitionInterceptor patientIdPartitionInterceptor = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor); + PatientIdPartitionInterceptor patientIdPartitionInterceptor = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor, myPartitionSettings); myInterceptorService.registerInterceptor(patientIdPartitionInterceptor); myInterceptorService.registerInterceptor(myRequestTenantPartitionInterceptor); try { @@ -118,7 +118,7 @@ public class JpaPackageCacheTest extends BaseJpaR4Test { myPartitionSettings.setPartitioningEnabled(true); myPartitionSettings.setDefaultPartitionId(0); myPartitionSettings.setUnnamedPartitionMode(true); - PatientIdPartitionInterceptor patientIdPartitionInterceptor = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor); + PatientIdPartitionInterceptor patientIdPartitionInterceptor = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor, myPartitionSettings); myInterceptorService.registerInterceptor(patientIdPartitionInterceptor); 
myInterceptorService.registerInterceptor(myRequestTenantPartitionInterceptor); try { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java index 6277ee7a989..e8f1768d0c0 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java @@ -14,10 +14,10 @@ import ca.uhn.fhir.jpa.model.entity.NpmPackageEntity; import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity; import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionResourceEntity; import ca.uhn.fhir.jpa.model.util.JpaConstants; -import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.UriParam; @@ -80,6 +80,9 @@ public class NpmR4Test extends BaseJpaR4Test { private static final Logger ourLog = LoggerFactory.getLogger(NpmR4Test.class); @Autowired + @Qualifier("myImplementationGuideDaoR4") + protected IFhirResourceDao myImplementationGuideDao; + @Autowired private IHapiPackageCacheManager myPackageCacheManager; @Autowired private NpmJpaValidationSupport myNpmJpaValidationSupport; @@ -95,12 +98,12 @@ public class NpmR4Test extends BaseJpaR4Test { private IInterceptorService myInterceptorService; @Autowired private RequestTenantPartitionInterceptor myRequestTenantPartitionInterceptor; - @Autowired - @Qualifier("myImplementationGuideDaoR4") - protected IFhirResourceDao myImplementationGuideDao; + @Override @BeforeEach public void before() throws Exception { + super.before(); + JpaPackageCache jpaPackageCache = ProxyUtil.getSingletonTarget(myPackageCacheManager, JpaPackageCache.class); myServer = new Server(0); @@ -144,7 +147,7 @@ public class NpmR4Test extends BaseJpaR4Test { .setFetchDependencies(true); myPackageInstallerSvc.install(spec); - runInTransaction(()->{ + runInTransaction(() -> { SearchParameterMap map = SearchParameterMap.newSynchronous(SearchParameter.SP_BASE, new TokenParam("NamingSystem")); IBundleProvider outcome = mySearchParameterDao.search(map); List resources = outcome.getResources(0, outcome.sizeOrThrowNpe()); @@ -345,24 +348,24 @@ public class NpmR4Test extends BaseJpaR4Test { public void testNumericIdsInstalledWithNpmPrefix() throws Exception { myStorageSettings.setAllowExternalReferences(true); - // Load a copy of hl7.fhir.uv.shorthand-0.12.0, but with id set to 1 instead of "shorthand-code-system" + // Load a copy of hl7.fhir.uv.shorthand-0.12.0, but with id set to 1 instead of "shorthand-code-system" byte[] bytes = ClasspathUtil.loadResourceAsByteArray("/packages/hl7.fhir.uv.shorthand-0.13.0.tgz"); - myFakeNpmServlet.responses.put("/hl7.fhir.uv.shorthand/0.13.0", bytes); + myFakeNpmServlet.responses.put("/hl7.fhir.uv.shorthand/0.13.0", bytes); - PackageInstallationSpec spec = new PackageInstallationSpec().setName("hl7.fhir.uv.shorthand").setVersion("0.13.0").setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL); - PackageInstallOutcomeJson outcome = myPackageInstallerSvc.install(spec); - // Be sure no further communication with the server - JettyUtil.closeServer(myServer); + PackageInstallationSpec spec = new 
PackageInstallationSpec().setName("hl7.fhir.uv.shorthand").setVersion("0.13.0").setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL); + PackageInstallOutcomeJson outcome = myPackageInstallerSvc.install(spec); + // Be sure no further communication with the server + JettyUtil.closeServer(myServer); - // Search for the installed resource - runInTransaction(() -> { - SearchParameterMap map = SearchParameterMap.newSynchronous(); - map.add(StructureDefinition.SP_URL, new UriParam("http://hl7.org/fhir/uv/shorthand/CodeSystem/shorthand-code-system")); - IBundleProvider result = myCodeSystemDao.search(map); - assertEquals(1, result.sizeOrThrowNpe()); - IBaseResource resource = result.getResources(0, 1).get(0); - assertEquals("CodeSystem/npm-1/_history/1", resource.getIdElement().toString()); - }); + // Search for the installed resource + runInTransaction(() -> { + SearchParameterMap map = SearchParameterMap.newSynchronous(); + map.add(StructureDefinition.SP_URL, new UriParam("http://hl7.org/fhir/uv/shorthand/CodeSystem/shorthand-code-system")); + IBundleProvider result = myCodeSystemDao.search(map); + assertEquals(1, result.sizeOrThrowNpe()); + IBaseResource resource = result.getResources(0, 1).get(0); + assertEquals("CodeSystem/npm-1/_history/1", resource.getIdElement().toString()); + }); } @@ -786,7 +789,7 @@ public class NpmR4Test extends BaseJpaR4Test { spSearch = myPractitionerRoleDao.search(map, new SystemRequestDetails()); assertEquals(1, spSearch.sizeOrThrowNpe()); - + // Install newer version spec = new PackageInstallationSpec().setName("test-exchange.fhir.us.com").setVersion("2.1.2").setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL); myPackageInstallerSvc.install(spec); @@ -799,7 +802,7 @@ public class NpmR4Test extends BaseJpaR4Test { assertEquals("2.2", sp.getVersion()); } - + @Test public void testLoadContents() throws IOException { @@ -889,16 +892,16 @@ public class NpmR4Test extends BaseJpaR4Test { map.add(StructureDefinition.SP_URL, new UriParam("https://www.medizininformatik-initiative.de/fhir/core/modul-labor/StructureDefinition/LogicalModel/Laborbefund")); IBundleProvider result = myStructureDefinitionDao.search(map); assertEquals(1, result.sizeOrThrowNpe()); - List resources = result.getResources(0,1); - assertFalse(((StructureDefinition)resources.get(0)).hasSnapshot()); + List resources = result.getResources(0, 1); + assertFalse(((StructureDefinition) resources.get(0)).hasSnapshot()); // Confirm that DiagnosticLab (a resource StructureDefinition with differential but no snapshot) was created with a generated snapshot. 
map = SearchParameterMap.newSynchronous(); map.add(StructureDefinition.SP_URL, new UriParam("https://www.medizininformatik-initiative.de/fhir/core/modul-labor/StructureDefinition/DiagnosticReportLab")); result = myStructureDefinitionDao.search(map); assertEquals(1, result.sizeOrThrowNpe()); - resources = result.getResources(0,1); - assertTrue(((StructureDefinition)resources.get(0)).hasSnapshot()); + resources = result.getResources(0, 1); + assertTrue(((StructureDefinition) resources.get(0)).hasSnapshot()); }); } @@ -963,5 +966,4 @@ public class NpmR4Test extends BaseJpaR4Test { } - } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingR4Test.java index 6ce428f6d15..b30a1216b8d 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingR4Test.java @@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test; import ca.uhn.fhir.parser.StrictErrorHandler; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.EncodingEnum; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import com.google.common.base.Charsets; import org.apache.commons.io.IOUtils; import org.apache.http.client.ClientProtocolException; @@ -107,7 +108,7 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test { Task task = new Task(); task.setStatus(Task.TaskStatus.COMPLETED); task.getOwner().setReference(patId); - taskId = myClient.create().resource(task).execute().getId().toUnqualifiedVersionless().getValue(); + taskId = myClient.create().resource(task).execute().getId().toUnqualifiedVersionless().getValue(); Encounter wrongEnc1 = new Encounter(); wrongEnc1.setStatus(EncounterStatus.ARRIVED); @@ -169,18 +170,18 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test { */ @Test public void testEverythingReturnsCorrectResources() throws Exception { - + Bundle bundle = fetchBundle(myServerBase + "/" + patId + "/$everything?_format=json&_count=100", EncodingEnum.JSON); - + assertNull(bundle.getLink("next")); - + Set actual = new TreeSet<>(); for (BundleEntryComponent nextEntry : bundle.getEntry()) { actual.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue()); } - + ourLog.info("Found IDs: {}", actual); - + assertThat(actual, hasItem(patId)); assertThat(actual, hasItem(encId1)); assertThat(actual, hasItem(encId2)); @@ -199,16 +200,16 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test { myStorageSettings.setEverythingIncludesFetchPageSize(1); Bundle bundle = fetchBundle(myServerBase + "/" + patId + "/$everything?_format=json&_count=100", EncodingEnum.JSON); - + assertNull(bundle.getLink("next")); - + Set actual = new TreeSet(); for (BundleEntryComponent nextEntry : bundle.getEntry()) { actual.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue()); } - + ourLog.info("Found IDs: {}", actual); - + assertThat(actual, hasItem(patId)); assertThat(actual, hasItem(encId1)); assertThat(actual, hasItem(encId2)); @@ -217,19 +218,19 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test { assertThat(actual, not(hasItem(myWrongPatId))); assertThat(actual, not(hasItem(myWrongEnc1))); } - + /** * See #674 */ @Test public void testEverythingPagesWithCorrectEncodingJson() throws Exception { 
- + Bundle bundle = fetchBundle(myServerBase + "/" + patId + "/$everything?_format=json&_count=1", EncodingEnum.JSON); - + assertNotNull(bundle.getLink("next").getUrl()); assertThat(bundle.getLink("next").getUrl(), containsString("_format=json")); bundle = fetchBundle(bundle.getLink("next").getUrl(), EncodingEnum.JSON); - + assertNotNull(bundle.getLink("next").getUrl()); assertThat(bundle.getLink("next").getUrl(), containsString("_format=json")); bundle = fetchBundle(bundle.getLink("next").getUrl(), EncodingEnum.JSON); @@ -240,9 +241,9 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test { */ @Test public void testEverythingPagesWithCorrectEncodingXml() throws Exception { - + Bundle bundle = fetchBundle(myServerBase + "/" + patId + "/$everything?_format=xml&_count=1", EncodingEnum.XML); - + assertNotNull(bundle.getLink("next").getUrl()); ourLog.info("Next link: {}", bundle.getLink("next").getUrl()); assertThat(bundle.getLink("next").getUrl(), containsString("_format=xml")); @@ -253,12 +254,13 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test { assertThat(bundle.getLink("next").getUrl(), containsString("_format=xml")); bundle = fetchBundle(bundle.getLink("next").getUrl(), EncodingEnum.XML); } + @Test //See https://github.com/hapifhir/hapi-fhir/issues/3215 public void testEverythingWithLargeResultSetDoesNotNpe() throws IOException { for (int i = 0; i < 500; i++) { Observation obs1 = new Observation(); - obs1.setSubject(new Reference( patId)); + obs1.setSubject(new Reference(patId)); obs1.getCode().addCoding().setCode("CODE1"); obs1.setValue(new StringType("obsvalue1")); myObservationDao.create(obs1, new SystemRequestDetails()).getId().toUnqualifiedVersionless(); @@ -283,7 +285,7 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test { } finally { IOUtils.closeQuietly(resp); } - + return bundle; } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMemberMatchOperationR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMemberMatchOperationR4Test.java index 4b3bbdbf580..638e9a5f0db 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMemberMatchOperationR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMemberMatchOperationR4Test.java @@ -129,7 +129,7 @@ public class PatientMemberMatchOperationR4Test extends BaseResourceProviderR4Tes .setId("Patient/A123"); if (includeBeneficiaryIdentifier) { member.setIdentifier(Collections.singletonList(new Identifier() - .setSystem(EXISTING_COVERAGE_PATIENT_IDENT_SYSTEM).setValue(EXISTING_COVERAGE_PATIENT_IDENT_VALUE))); + .setSystem(EXISTING_COVERAGE_PATIENT_IDENT_SYSTEM).setValue(EXISTING_COVERAGE_PATIENT_IDENT_VALUE))); } member.setActive(true); myClient.update().resource(member).execute(); @@ -248,6 +248,158 @@ public class PatientMemberMatchOperationR4Test extends BaseResourceProviderR4Tes validateResponseConsent(parametersResponse, myConsent); } + @Test + public void testMemberMatchByCoverageIdentifier() throws Exception { + createCoverageWithBeneficiary(true, true); + + Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent); + Parameters parametersResponse = performOperation(myServerBase + ourQuery, EncodingEnum.JSON, inputParameters); + + validateMemberPatient(parametersResponse); + validateNewCoverage(parametersResponse, newCoverage); + } + + /** + * Validates that second resource from 
the response is same as the received coverage + */ + private void validateNewCoverage(Parameters theResponse, Coverage theOriginalCoverage) { + List patientList = ParametersUtil.getNamedParameters(this.getFhirContext(), theResponse, PARAM_NEW_COVERAGE); + assertEquals(1, patientList.size()); + Coverage respCoverage = (Coverage) theResponse.getParameter().get(1).getResource(); + + assertEquals("Coverage/" + theOriginalCoverage.getId(), respCoverage.getId()); + assertEquals(theOriginalCoverage.getIdentifierFirstRep().getSystem(), respCoverage.getIdentifierFirstRep().getSystem()); + assertEquals(theOriginalCoverage.getIdentifierFirstRep().getValue(), respCoverage.getIdentifierFirstRep().getValue()); + } + + private void validateConsentPatientAndPerformerRef(Patient thePatient, Consent theConsent) { + String patientRef = thePatient.getIdElement().toUnqualifiedVersionless().getValue(); + assertEquals(patientRef, theConsent.getPatient().getReference()); + assertEquals(patientRef, theConsent.getPerformer().get(0).getReference()); + } + + private void validateMemberPatient(Parameters response) { +// parameter MemberPatient must have a new identifier with: +// { +// "use": "usual", +// "type": { +// "coding": [ +// { +// "system": "http://terminology.hl7.org/CodeSystem/v2-0203", +// "code": "UMB", +// "display": "Member Number", +// "userSelected": false +// } +// ], +// "text": "Member Number" +// }, +// "system": COVERAGE_PATIENT_IDENT_SYSTEM, +// "value": COVERAGE_PATIENT_IDENT_VALUE +// } + List patientList = ParametersUtil.getNamedParameters(this.getFhirContext(), response, PARAM_MEMBER_PATIENT); + assertEquals(1, patientList.size()); + Patient resultPatient = (Patient) response.getParameter().get(0).getResource(); + + assertNotNull(resultPatient.getIdentifier()); + assertEquals(1, resultPatient.getIdentifier().size()); + Identifier addedIdentifier = resultPatient.getIdentifier().get(0); + assertEquals(Identifier.IdentifierUse.USUAL, addedIdentifier.getUse()); + checkCoding(addedIdentifier.getType()); + assertEquals(EXISTING_COVERAGE_PATIENT_IDENT_SYSTEM, addedIdentifier.getSystem()); + assertEquals(EXISTING_COVERAGE_PATIENT_IDENT_VALUE, addedIdentifier.getValue()); + } + + @Test + public void testNoCoverageMatchFound() throws Exception { + Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent); + performOperationExpecting422(myServerBase + ourQuery, EncodingEnum.JSON, inputParameters, + "Could not find coverage for member"); + } + + @Test + public void testConsentUpdatePatientAndPerformer() throws Exception { + createCoverageWithBeneficiary(true, true); + Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent); + Parameters parametersResponse = performOperation(myServerBase + ourQuery, + EncodingEnum.JSON, inputParameters); + + Consent respConsent = validateResponseConsent(parametersResponse, myConsent); + validateConsentPatientAndPerformerRef(myPatient, respConsent); + } + + private Parameters buildInputParameters(Patient thePatient, Coverage theOldCoverage, Coverage theNewCoverage, Consent theConsent) { + Parameters p = new Parameters(); + ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_MEMBER_PATIENT, thePatient); + ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_OLD_COVERAGE, theOldCoverage); + ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_NEW_COVERAGE, theNewCoverage); + 
ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_CONSENT, theConsent); + return p; + } + + private Parameters performOperation(String theUrl, + EncodingEnum theEncoding, Parameters theInputParameters) throws Exception { + + HttpPost post = new HttpPost(theUrl); + post.addHeader(Constants.HEADER_ACCEPT_ENCODING, theEncoding.toString()); + post.setEntity(new ResourceEntity(this.getFhirContext(), theInputParameters)); + ourLog.info("Request: {}", post); + try (CloseableHttpResponse response = ourHttpClient.execute(post)) { + String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); + ourLog.info("Response: {}", responseString); + assertEquals(200, response.getStatusLine().getStatusCode()); + + return theEncoding.newParser(myFhirContext).parseResource(Parameters.class, + responseString); + } + } + + private void performOperationExpecting422(String theUrl, EncodingEnum theEncoding, + Parameters theInputParameters, String theExpectedErrorMsg) throws Exception { + + HttpPost post = new HttpPost(theUrl); + post.addHeader(Constants.HEADER_ACCEPT_ENCODING, theEncoding.toString()); + post.setEntity(new ResourceEntity(this.getFhirContext(), theInputParameters)); + ourLog.info("Request: {}", post); + try (CloseableHttpResponse response = ourHttpClient.execute(post)) { + String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); + ourLog.info("Response: {}", responseString); + assertEquals(422, response.getStatusLine().getStatusCode()); + assertThat(responseString, containsString(theExpectedErrorMsg)); + } + } + + private void checkCoding(CodeableConcept theType) { + // must match: + // "coding": [ + // { + // "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + // "code": "MB", + // "display": "Member Number", + // "userSelected": false + // } + // * ] + Coding coding = theType.getCoding().get(0); + assertEquals("http://terminology.hl7.org/CodeSystem/v2-0203", coding.getSystem()); + assertEquals("MB", coding.getCode()); + assertEquals("Member Number", coding.getDisplay()); + assertFalse(coding.getUserSelected()); + } + + /** + * Validates that consent from the response is same as the received consent with additional identifier and extension + */ + private Consent validateResponseConsent(Parameters theResponse, Consent theOriginalConsent) { + List consentList = ParametersUtil.getNamedParameters(this.getFhirContext(), theResponse, PARAM_CONSENT); + assertEquals(1, consentList.size()); + Consent respConsent = (Consent) theResponse.getParameter().get(2).getResource(); + + assertEquals(theOriginalConsent.getScope().getCodingFirstRep().getSystem(), respConsent.getScope().getCodingFirstRep().getSystem()); + assertEquals(theOriginalConsent.getScope().getCodingFirstRep().getCode(), respConsent.getScope().getCodingFirstRep().getCode()); + assertEquals(myMemberMatcherR4Helper.CONSENT_IDENTIFIER_CODE_SYSTEM, respConsent.getIdentifier().get(0).getSystem()); + assertNotNull(respConsent.getIdentifier().get(0).getValue()); + return respConsent; + } + @Nested public class ValidateParameterErrors { private Patient ourPatient; @@ -318,7 +470,7 @@ public class PatientMemberMatchOperationR4Test extends BaseResourceProviderR4Tes @Test public void testMissingConsentPatientReference() throws Exception { ourPatient.setName(Lists.newArrayList(new HumanName() - .setUse(HumanName.NameUse.OFFICIAL).setFamily("Person"))) + .setUse(HumanName.NameUse.OFFICIAL).setFamily("Person"))) .setBirthDateElement(new 
DateType("2020-01-01")); Parameters inputParameters = buildInputParameters(ourPatient, ourOldCoverage, ourNewCoverage, ourConsent); @@ -339,164 +491,4 @@ public class PatientMemberMatchOperationR4Test extends BaseResourceProviderR4Tes } } - - @Test - public void testMemberMatchByCoverageIdentifier() throws Exception { - createCoverageWithBeneficiary(true, true); - - Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent); - Parameters parametersResponse = performOperation(myServerBase + ourQuery, EncodingEnum.JSON, inputParameters); - - validateMemberPatient(parametersResponse); - validateNewCoverage(parametersResponse, newCoverage); - } - - - /** - * Validates that second resource from the response is same as the received coverage - */ - private void validateNewCoverage(Parameters theResponse, Coverage theOriginalCoverage) { - List patientList = ParametersUtil.getNamedParameters(this.getFhirContext(), theResponse, PARAM_NEW_COVERAGE); - assertEquals(1, patientList.size()); - Coverage respCoverage = (Coverage) theResponse.getParameter().get(1).getResource(); - - assertEquals("Coverage/" + theOriginalCoverage.getId(), respCoverage.getId()); - assertEquals(theOriginalCoverage.getIdentifierFirstRep().getSystem(), respCoverage.getIdentifierFirstRep().getSystem()); - assertEquals(theOriginalCoverage.getIdentifierFirstRep().getValue(), respCoverage.getIdentifierFirstRep().getValue()); - } - - private void validateConsentPatientAndPerformerRef(Patient thePatient, Consent theConsent) { - String patientRef = thePatient.getIdElement().toUnqualifiedVersionless().getValue(); - assertEquals(patientRef, theConsent.getPatient().getReference()); - assertEquals(patientRef, theConsent.getPerformer().get(0).getReference()); - } - - - private void validateMemberPatient(Parameters response) { -// parameter MemberPatient must have a new identifier with: -// { -// "use": "usual", -// "type": { -// "coding": [ -// { -// "system": "http://terminology.hl7.org/CodeSystem/v2-0203", -// "code": "UMB", -// "display": "Member Number", -// "userSelected": false -// } -// ], -// "text": "Member Number" -// }, -// "system": COVERAGE_PATIENT_IDENT_SYSTEM, -// "value": COVERAGE_PATIENT_IDENT_VALUE -// } - List patientList = ParametersUtil.getNamedParameters(this.getFhirContext(), response, PARAM_MEMBER_PATIENT); - assertEquals(1, patientList.size()); - Patient resultPatient = (Patient) response.getParameter().get(0).getResource(); - - assertNotNull(resultPatient.getIdentifier()); - assertEquals(1, resultPatient.getIdentifier().size()); - Identifier addedIdentifier = resultPatient.getIdentifier().get(0); - assertEquals(Identifier.IdentifierUse.USUAL, addedIdentifier.getUse()); - checkCoding(addedIdentifier.getType()); - assertEquals(EXISTING_COVERAGE_PATIENT_IDENT_SYSTEM, addedIdentifier.getSystem()); - assertEquals(EXISTING_COVERAGE_PATIENT_IDENT_VALUE, addedIdentifier.getValue()); - } - - - @Test - public void testNoCoverageMatchFound() throws Exception { - Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent); - performOperationExpecting422(myServerBase + ourQuery, EncodingEnum.JSON, inputParameters, - "Could not find coverage for member"); - } - - @Test - public void testConsentUpdatePatientAndPerformer() throws Exception { - createCoverageWithBeneficiary(true, true); - Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent); - Parameters parametersResponse = performOperation(myServerBase + 
ourQuery, - EncodingEnum.JSON, inputParameters); - - Consent respConsent = validateResponseConsent(parametersResponse, myConsent); - validateConsentPatientAndPerformerRef(myPatient, respConsent); - } - - - private Parameters buildInputParameters(Patient thePatient, Coverage theOldCoverage, Coverage theNewCoverage, Consent theConsent) { - Parameters p = new Parameters(); - ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_MEMBER_PATIENT, thePatient); - ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_OLD_COVERAGE, theOldCoverage); - ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_NEW_COVERAGE, theNewCoverage); - ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_CONSENT, theConsent); - return p; - } - - - private Parameters performOperation(String theUrl, - EncodingEnum theEncoding, Parameters theInputParameters) throws Exception { - - HttpPost post = new HttpPost(theUrl); - post.addHeader(Constants.HEADER_ACCEPT_ENCODING, theEncoding.toString()); - post.setEntity(new ResourceEntity(this.getFhirContext(), theInputParameters)); - ourLog.info("Request: {}", post); - try (CloseableHttpResponse response = ourHttpClient.execute(post)) { - String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info("Response: {}", responseString); - assertEquals(200, response.getStatusLine().getStatusCode()); - - return theEncoding.newParser(myFhirContext).parseResource(Parameters.class, - responseString); - } - } - - - private void performOperationExpecting422(String theUrl, EncodingEnum theEncoding, - Parameters theInputParameters, String theExpectedErrorMsg) throws Exception { - - HttpPost post = new HttpPost(theUrl); - post.addHeader(Constants.HEADER_ACCEPT_ENCODING, theEncoding.toString()); - post.setEntity(new ResourceEntity(this.getFhirContext(), theInputParameters)); - ourLog.info("Request: {}", post); - try (CloseableHttpResponse response = ourHttpClient.execute(post)) { - String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info("Response: {}", responseString); - assertEquals(422, response.getStatusLine().getStatusCode()); - assertThat(responseString, containsString(theExpectedErrorMsg)); - } - } - - - private void checkCoding(CodeableConcept theType) { - // must match: - // "coding": [ - // { - // "system": "http://terminology.hl7.org/CodeSystem/v2-0203", - // "code": "MB", - // "display": "Member Number", - // "userSelected": false - // } - // * ] - Coding coding = theType.getCoding().get(0); - assertEquals("http://terminology.hl7.org/CodeSystem/v2-0203", coding.getSystem()); - assertEquals("MB", coding.getCode()); - assertEquals("Member Number", coding.getDisplay()); - assertFalse(coding.getUserSelected()); - } - - /** - * Validates that consent from the response is same as the received consent with additional identifier and extension - */ - private Consent validateResponseConsent(Parameters theResponse, Consent theOriginalConsent) { - List consentList = ParametersUtil.getNamedParameters(this.getFhirContext(), theResponse, PARAM_CONSENT); - assertEquals(1, consentList.size()); - Consent respConsent= (Consent) theResponse.getParameter().get(2).getResource(); - - assertEquals(theOriginalConsent.getScope().getCodingFirstRep().getSystem(), respConsent.getScope().getCodingFirstRep().getSystem()); - assertEquals(theOriginalConsent.getScope().getCodingFirstRep().getCode(), 
respConsent.getScope().getCodingFirstRep().getCode()); - assertEquals(myMemberMatcherR4Helper.CONSENT_IDENTIFIER_CODE_SYSTEM, respConsent.getIdentifier().get(0).getSystem()); - assertNotNull(respConsent.getIdentifier().get(0).getValue()); - return respConsent; - } - } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/RemoteTerminologyServiceResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/RemoteTerminologyServiceResourceProviderR4Test.java index c31db7196e3..a02443b3f09 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/RemoteTerminologyServiceResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/RemoteTerminologyServiceResourceProviderR4Test.java @@ -8,6 +8,7 @@ import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.annotation.RequiredParam; import ca.uhn.fhir.rest.annotation.Search; import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; +import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; import ca.uhn.fhir.rest.param.UriParam; import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; @@ -43,7 +44,7 @@ public class RemoteTerminologyServiceResourceProviderR4Test { private static final String CODE = "CODE"; private static final String VALUE_SET_URL = "http://value.set/url"; private static final String SAMPLE_MESSAGE = "This is a sample message"; - private static FhirContext ourCtx = FhirContext.forR4(); + private static final FhirContext ourCtx = FhirContext.forR4Cached(); private MyCodeSystemProvider myCodeSystemProvider = new MyCodeSystemProvider(); private MyValueSetProvider myValueSetProvider = new MyValueSetProvider(); @@ -57,6 +58,7 @@ public class RemoteTerminologyServiceResourceProviderR4Test { public void before_ConfigureService() { String myBaseUrl = "http://localhost:" + myRestfulServerExtension.getPort(); mySvc = new RemoteTerminologyServiceValidationSupport(ourCtx, myBaseUrl); + mySvc.addClientInterceptor(new LoggingInterceptor(false).setLogRequestSummary(true).setLogResponseSummary(true)); } @AfterEach diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java index d77440aeef7..4dbbee087a6 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java @@ -611,7 +611,7 @@ public class ResourceProviderHasParamR4Test extends BaseResourceProviderR4Test { searchAndReturnUnqualifiedVersionlessIdValues(uri); - List queries = myCaptureQueriesListener.getSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.toList()); + List queries = myCaptureQueriesListener.getSelectQueries().stream().map(t -> t.getSql(true, false)).toList(); List notInListQueries = new ArrayList<>(); for (String query : queries) { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInterceptorR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInterceptorR4Test.java index b7e8e117bbc..2e17f8d2780 100644 --- 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInterceptorR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInterceptorR4Test.java @@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.model.search.SearchStatusEnum; import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test; import ca.uhn.fhir.jpa.searchparam.submit.interceptor.SearchParamValidatingInterceptor; import ca.uhn.fhir.parser.IParser; +import ca.uhn.fhir.rest.api.CacheControlDirective; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; @@ -238,7 +239,7 @@ public class ResourceProviderInterceptorR4Test extends BaseResourceProviderR4Tes @Test public void testCreateReflexResourceTheHardWay() { - ServerOperationInterceptorAdapter interceptor = new ReflexInterceptor(); + ReflexInterceptor interceptor = new ReflexInterceptor(); myServer.getRestfulServer().registerInterceptor(interceptor); try { @@ -249,13 +250,16 @@ public class ResourceProviderInterceptorR4Test extends BaseResourceProviderR4Tes await() .atMost(60, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) .until(()->{ Bundle observations = myClient .search() .forResource("Observation") .where(Observation.SUBJECT.hasId(pid)) .returnBundle(Bundle.class) + .cacheControl(CacheControlDirective.noCache()) .execute(); + ourLog.info("Have {} observations", observations.getEntry().size()); return observations.getEntry().size(); }, equalTo(1)); @@ -558,22 +562,27 @@ public class ResourceProviderInterceptorR4Test extends BaseResourceProviderR4Tes } - public class ReflexInterceptor extends ServerOperationInterceptorAdapter { - @Override + @Interceptor + public class ReflexInterceptor { + + @Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED) public void resourceCreated(RequestDetails theRequest, IBaseResource theResource) { + ourLog.info("resourceCreated with {}", theResource); if (theResource instanceof Patient) { ((ServletRequestDetails) theRequest).getServletRequest().setAttribute("CREATED_PATIENT", theResource); } } - @Override + @Hook(Pointcut.SERVER_OUTGOING_RESPONSE) public void processingCompletedNormally(ServletRequestDetails theRequestDetails) { Patient createdPatient = (Patient) theRequestDetails.getServletRequest().getAttribute("CREATED_PATIENT"); + ourLog.info("processingCompletedNormally with {}", createdPatient); if (createdPatient != null) { Observation observation = new Observation(); observation.setSubject(new Reference(createdPatient.getId())); - myClient.create().resource(observation).execute(); + IIdType id = myClient.create().resource(observation).execute().getId(); + ourLog.info("Created Observation with ID: {}", id); } } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetHSearchDisabledTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetHSearchDisabledTest.java index 67dc5a72e85..a71da858d22 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetHSearchDisabledTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetHSearchDisabledTest.java @@ -5,28 +5,19 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.provider.ValueSetOperationProvider; import 
ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; -import ca.uhn.fhir.jpa.subscription.match.config.WebsocketDispatcherConfig; import ca.uhn.fhir.jpa.test.BaseJpaTest; import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig; import ca.uhn.fhir.jpa.test.config.TestR4Config; -import ca.uhn.fhir.parser.StrictErrorHandler; import ca.uhn.fhir.rest.api.EncodingEnum; -import ca.uhn.fhir.rest.client.api.IGenericClient; -import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; -import ca.uhn.fhir.rest.server.RestfulServer; -import ca.uhn.fhir.rest.server.interceptor.CorsInterceptor; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; -import ca.uhn.fhir.test.utilities.JettyUtil; -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; +import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.ValueSet; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.ApplicationContext; @@ -36,16 +27,9 @@ import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallbackWithoutResult; import org.springframework.transaction.support.TransactionTemplate; -import org.springframework.web.context.ContextLoader; -import org.springframework.web.context.WebApplicationContext; -import org.springframework.web.context.support.AnnotationConfigWebApplicationContext; -import org.springframework.web.context.support.GenericWebApplicationContext; -import org.springframework.web.cors.CorsConfiguration; -import org.springframework.web.servlet.DispatcherServlet; import javax.annotation.Nonnull; import java.io.IOException; -import java.util.Arrays; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; @@ -57,10 +41,6 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderR4ValueSetHSearchDisabledTest.class); - private static RestfulServer ourRestServer; - private static String ourServerBase; - private static Server ourServer; - private static DatabaseBackedPagingProvider ourPagingProvider; @Autowired private FhirContext myFhirCtx; @@ -81,7 +61,13 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest { private IIdType myExtensionalCsId; private IIdType myExtensionalVsId; - private IGenericClient myClient; + @SuppressWarnings("JUnitMalformedDeclaration") + @RegisterExtension + private RestfulServerExtension myServer = new RestfulServerExtension(FhirContext.forR4Cached()) + .withServer(t -> t.registerProviders(myResourceProviders.createProviders())) + .withServer(t -> t.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class))) + .withServer(t -> t.setDefaultResponseEncoding(EncodingEnum.XML)) + .withServer(t -> t.setPagingProvider(myAppCtx.getBean(DatabaseBackedPagingProvider.class))); private void loadAndPersistCodeSystemAndValueSet() 
throws IOException { loadAndPersistCodeSystem(); @@ -101,7 +87,7 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest { myExtensionalCsId = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless(); } }); - myCodeSystemDao.readEntity(myExtensionalCsId, null).getId(); + myCodeSystemDao.readEntity(myExtensionalCsId, null); } private void loadAndPersistValueSet() throws IOException { @@ -117,7 +103,7 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest { myExtensionalVsId = myValueSetDao.create(theValueSet, mySrd).getId().toUnqualifiedVersionless(); } }); - myValueSetDao.readEntity(myExtensionalVsId, null).getId(); + myValueSetDao.readEntity(myExtensionalVsId, null); } @Override @@ -130,12 +116,12 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest { return myTxManager; } - @Test public void testExpandById() throws Exception { loadAndPersistCodeSystemAndValueSet(); - Parameters respParam = myClient + Parameters respParam = myServer + .getFhirClient() .operation() .onInstance(myExtensionalVsId) .named("expand") @@ -161,76 +147,5 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest { } - @BeforeEach - public void before() throws Exception { - myFhirCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER); - myFhirCtx.getRestfulClientFactory().setSocketTimeout(1200 * 1000); - myFhirCtx.setParserErrorHandler(new StrictErrorHandler()); - - if (ourServer == null) { - ourRestServer = new RestfulServer(myFhirCtx); - ourRestServer.registerProviders(myResourceProviders.createProviders()); - ourRestServer.setDefaultResponseEncoding(EncodingEnum.XML); - - ourPagingProvider = myAppCtx.getBean(DatabaseBackedPagingProvider.class); - - ourRestServer.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class)); - - Server server = new Server(0); - - ServletContextHandler proxyHandler = new ServletContextHandler(); - proxyHandler.setContextPath("/"); - - ServletHolder servletHolder = new ServletHolder(); - servletHolder.setServlet(ourRestServer); - proxyHandler.addServlet(servletHolder, "/fhir/context/*"); - - GenericWebApplicationContext webApplicationContext = new GenericWebApplicationContext(); - webApplicationContext.setParent(myAppCtx); - webApplicationContext.refresh(); - proxyHandler.getServletContext().setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, webApplicationContext); - - DispatcherServlet dispatcherServlet = new DispatcherServlet(); - // dispatcherServlet.setApplicationContext(webApplicationContext); - dispatcherServlet.setContextClass(AnnotationConfigWebApplicationContext.class); - ServletHolder subsServletHolder = new ServletHolder(); - subsServletHolder.setServlet(dispatcherServlet); - subsServletHolder.setInitParameter( - ContextLoader.CONFIG_LOCATION_PARAM, - WebsocketDispatcherConfig.class.getName()); - proxyHandler.addServlet(subsServletHolder, "/*"); - - // Register a CORS filter - CorsConfiguration config = new CorsConfiguration(); - CorsInterceptor corsInterceptor = new CorsInterceptor(config); - config.addAllowedHeader("x-fhir-starter"); - config.addAllowedHeader("Origin"); - config.addAllowedHeader("Accept"); - config.addAllowedHeader("X-Requested-With"); - config.addAllowedHeader("Content-Type"); - config.addAllowedHeader("Access-Control-Request-Method"); - config.addAllowedHeader("Access-Control-Request-Headers"); - config.addAllowedOrigin("*"); - config.addExposedHeader("Location"); 
- config.addExposedHeader("Content-Location"); - config.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE", "OPTIONS")); - ourRestServer.registerInterceptor(corsInterceptor); - - server.setHandler(proxyHandler); - JettyUtil.startServer(server); - int port = JettyUtil.getPortForStartedServer(server); - ourServerBase = "http://localhost:" + port + "/fhir/context"; - - myFhirCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER); - myFhirCtx.getRestfulClientFactory().setSocketTimeout(20000); - - ourServer = server; - } - - ourRestServer.setPagingProvider(ourPagingProvider); - - myClient = myFhirCtx.newRestfulGenericClient(ourServerBase); - } - } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java index 3c0b880b903..76e917ae3f4 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java @@ -114,7 +114,7 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv myExtensionalCsId = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless(); } }); - myCodeSystemDao.readEntity(myExtensionalCsId, null).getId(); + myCodeSystemDao.readEntity(myExtensionalCsId, null); } private void loadAndPersistValueSet() throws IOException { @@ -145,7 +145,7 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv default: throw new IllegalArgumentException("HTTP verb is not supported: " + theVerb); } - myExtensionalVsIdOnResourceTable = myValueSetDao.readEntity(myExtensionalVsId, null).getId(); + myExtensionalVsIdOnResourceTable = (Long) myValueSetDao.readEntity(myExtensionalVsId, null).getPersistentId().getId(); } private CodeSystem createExternalCs() { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java index 035a23222ff..2ce02edde6a 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java @@ -106,7 +106,7 @@ public class ResourceProviderR4ValueSetVerCSNoVerTest extends BaseResourceProvid myExtensionalCsId = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless(); } }); - myCodeSystemDao.readEntity(myExtensionalCsId, null).getId(); + myCodeSystemDao.readEntity(myExtensionalCsId, null); } private void loadAndPersistValueSet() throws IOException { @@ -138,7 +138,7 @@ public class ResourceProviderR4ValueSetVerCSNoVerTest extends BaseResourceProvid default: throw new IllegalArgumentException("HTTP verb is not supported: " + theVerb); } - myExtensionalVsIdOnResourceTable = myValueSetDao.readEntity(myExtensionalVsId, null).getId(); + myExtensionalVsIdOnResourceTable = myValueSetDao.readEntity(myExtensionalVsId, null).getIdDt().getIdPartAsLong(); } private CodeSystem createExternalCs() { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java index ad75e811737..57f71d85b37 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java @@ -106,7 +106,7 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider myExtensionalCsId_v1 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless(); } }); - myCodeSystemDao.readEntity(myExtensionalCsId_v1, null).getId(); + myCodeSystemDao.readEntity(myExtensionalCsId_v1, null); theCodeSystem.setId("CodeSystem/cs2"); theCodeSystem.setVersion("2"); @@ -119,7 +119,7 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider myExtensionalCsId_v2 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless(); } }); - myCodeSystemDao.readEntity(myExtensionalCsId_v2, null).getId(); + myCodeSystemDao.readEntity(myExtensionalCsId_v2, null); } @@ -129,13 +129,13 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider valueSet.setId("ValueSet/vs1"); valueSet.getCompose().getInclude().get(0).setVersion("1"); myExtensionalVsId_v1 = persistSingleValueSet(valueSet, HttpVerb.POST); - myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getId(); + myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getIdDt().getIdPartAsLong(); valueSet.setVersion("2"); valueSet.setId("ValueSet/vs2"); valueSet.getCompose().getInclude().get(0).setVersion("2"); myExtensionalVsId_v2 = persistSingleValueSet(valueSet, HttpVerb.POST); - myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getId(); + myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getIdDt().getIdPartAsLong(); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java index 63587c86375..4171540f847 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.search.reindex; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; @@ -10,7 +11,6 @@ import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; -import ca.uhn.fhir.jpa.test.BaseJpaTest; import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.apache.commons.lang3.time.DateUtils; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -51,7 +51,7 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; @ExtendWith(MockitoExtension.class) -public class ResourceReindexingSvcImplTest extends BaseJpaTest { +public class ResourceReindexingSvcImplTest { private static final 
FhirContext ourFhirContext = FhirContext.forR4Cached(); @@ -87,21 +87,10 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest { private final ResourceReindexer myResourceReindexer = new ResourceReindexer(ourFhirContext); @InjectMocks private final ResourceReindexingSvcImpl mySvc = new ResourceReindexingSvcImpl(); + private JpaStorageSettings myStorageSettings = new JpaStorageSettings(); - @Override - public FhirContext getFhirContext() { - return ourFhirContext; - } - - @Override - protected PlatformTransactionManager getTxManager() { - return myTxManager; - } - - @Override @BeforeEach public void before() throws Exception { - super.before(); myStorageSettings.setReindexThreadCount(2); mySvc.setContextForUnitTest(ourFhirContext); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java index 68499535fbf..6c6b348e475 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java @@ -26,8 +26,8 @@ import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor; import ca.uhn.fhir.jpa.dao.r4.FhirSystemDaoR4; import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry; import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; @@ -76,6 +76,7 @@ import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionException; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.SimpleTransactionStatus; +import org.springframework.transaction.support.TransactionSynchronizationManager; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -265,6 +266,7 @@ public class GiantTransactionPerfTest { myEobDao.setIdHelperSvcForUnitTest(myIdHelperService); myEobDao.setPartitionSettingsForUnitTest(myPartitionSettings); myEobDao.setJpaStorageResourceParserForUnitTest(myJpaStorageResourceParser); + myEobDao.setExternallyStoredResourceServiceRegistryForUnitTest(new ExternallyStoredResourceServiceRegistry()); myEobDao.start(); myDaoRegistry.setResourceDaos(Lists.newArrayList(myEobDao)); @@ -869,7 +871,7 @@ public class GiantTransactionPerfTest { private static class MockRequestPartitionHelperSvc implements ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc { @Nonnull @Override - public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType, @Nonnull ReadPartitionIdRequestDetails theDetails) { + public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull ReadPartitionIdRequestDetails theDetails) { return RequestPartitionId.defaultPartition(); } @@ -884,12 +886,6 @@ public class GiantTransactionPerfTest { return RequestPartitionId.defaultPartition(); } - @Override - @Nonnull - public PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId) { - return new 
PartitionablePartitionId(theRequestPartitionId.getFirstPartitionIdOrNull(), theRequestPartitionId.getPartitionDate()); - } - @Nonnull @Override public Set toReadPartitions(@Nonnull RequestPartitionId theRequestPartitionId) { @@ -911,17 +907,18 @@ public class GiantTransactionPerfTest { @Nonnull @Override public TransactionStatus getTransaction(TransactionDefinition definition) throws TransactionException { + TransactionSynchronizationManager.setActualTransactionActive(true); return new SimpleTransactionStatus(); } @Override public void commit(@Nonnull TransactionStatus status) throws TransactionException { - + TransactionSynchronizationManager.setActualTransactionActive(false); } @Override public void rollback(@Nonnull TransactionStatus status) throws TransactionException { - + TransactionSynchronizationManager.setActualTransactionActive(false); } } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/BaseTermR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/BaseTermR4Test.java index 8fceada16bb..9b0f3063a8f 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/BaseTermR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/BaseTermR4Test.java @@ -27,8 +27,10 @@ public abstract class BaseTermR4Test extends BaseJpaR4Test { Long myExtensionalCsIdOnResourceTable; Long myExtensionalVsIdOnResourceTable; + @Override @BeforeEach - public void before() { + public void before() throws Exception { + super.before(); myStorageSettings.setAllowExternalReferences(true); } @@ -143,7 +145,7 @@ public abstract class BaseTermR4Test extends BaseJpaR4Test { default: throw new IllegalArgumentException("HTTP verb is not supported: " + theVerb); } - myExtensionalCsIdOnResourceTable = myCodeSystemDao.readEntity(myExtensionalCsId, null).getId(); + myExtensionalCsIdOnResourceTable = (Long) myCodeSystemDao.readEntity(myExtensionalCsId, null).getPersistentId().getId(); } void loadAndPersistValueSet(HttpVerb theVerb) throws IOException { @@ -180,7 +182,7 @@ public abstract class BaseTermR4Test extends BaseJpaR4Test { default: throw new IllegalArgumentException("HTTP verb is not supported: " + theVerb); } - myExtensionalVsIdOnResourceTable = myValueSetDao.readEntity(myExtensionalVsId, null).getId(); + myExtensionalVsIdOnResourceTable = (Long) myValueSetDao.readEntity(myExtensionalVsId, null).getPersistentId().getId(); } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index e4f8c3a23b4..2edf0f57ee5 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -53,6 +53,8 @@ public class TerminologySvcImplR4Test extends BaseTermR4Test { ConceptValidationOptions optsNoGuess = new ConceptValidationOptions(); ConceptValidationOptions optsGuess = new ConceptValidationOptions().setInferSystem(true); + @Autowired + private Batch2JobHelper myBatchJobHelper; @Override @AfterEach @@ -479,6 +481,7 @@ public class TerminologySvcImplR4Test extends BaseTermR4Test { await().until(() -> { myBatch2JobHelper.runMaintenancePass(); myTerminologyDeferredStorageSvc.saveAllDeferred(); + myBatchJobHelper.awaitAllJobsOfJobDefinitionIdToComplete(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME); return myTerminologyDeferredStorageSvc.isStorageQueueEmpty(true); 
}, equalTo(true)); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml new file mode 100644 index 00000000000..dcada7fec76 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml @@ -0,0 +1,46 @@ + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml index ac59ecbb484..ecc32818129 100644 --- a/hapi-fhir-jpaserver-test-r4b/pom.xml +++ b/hapi-fhir-jpaserver-test-r4b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4b/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-test-r4b/src/test/resources/logback-test.xml new file mode 100644 index 00000000000..dcada7fec76 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r4b/src/test/resources/logback-test.xml @@ -0,0 +1,46 @@ + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index ed50c6255da..38acf3ffde3 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java index ec718d86496..bbf7b0ece51 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java @@ -45,6 +45,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.provider.JpaSystemProvider; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc; @@ -140,6 +141,8 @@ import static org.mockito.Mockito.mock; @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = {TestR5Config.class}) public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuilder { + @Autowired + protected PartitionSettings myPartitionSettings; @Autowired protected ITermCodeSystemStorageSvc myTermCodeSystemStorageSvc; @Autowired diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java new file mode 100644 index 00000000000..e5aea038ccf --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java @@ -0,0 +1,247 @@ +package ca.uhn.fhir.jpa.dao.r5; + +import ca.uhn.fhir.interceptor.api.Hook; +import ca.uhn.fhir.interceptor.api.Interceptor; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import 
ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; +import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails; +import ca.uhn.fhir.jpa.searchparam.extractor.IResourceLinkResolver; +import ca.uhn.fhir.jpa.util.JpaHapiTransactionService; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r5.model.Enumerations; +import org.hl7.fhir.r5.model.Observation; +import org.hl7.fhir.r5.model.Patient; +import org.hl7.fhir.r5.model.Reference; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.transaction.annotation.Propagation; + +import javax.annotation.Nonnull; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.matchesPattern; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class CrossPartitionReferencesTest extends BaseJpaR5Test { + + public static final RequestPartitionId PARTITION_PATIENT = RequestPartitionId.fromPartitionId(1); + public static final RequestPartitionId PARTITION_OBSERVATION = RequestPartitionId.fromPartitionId(2); + @Autowired + private JpaHapiTransactionService myTransactionSvc; + @Autowired + private IRequestPartitionHelperSvc myPartitionHelperSvc; + @Autowired + private IHapiTransactionService myTransactionService; + @Autowired + private IResourceLinkResolver myResourceLinkResolver; + @Mock + private ICrossPartitionReferenceDetectedHandler myCrossPartitionReferencesDetectedInterceptor; + @Captor + private ArgumentCaptor myCrossPartitionReferenceDetailsCaptor; + + @Override + @BeforeEach + public void before() throws Exception { + super.before(); + + myPartitionSettings.setPartitioningEnabled(true); + myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); + myPartitionSettings.setUnnamedPartitionMode(true); + myPartitionSettings.setAlwaysOpenNewTransactionForDifferentPartition(true); + + myInterceptorRegistry.registerInterceptor(new MyPartitionSelectorInterceptor()); + myInterceptorRegistry.registerInterceptor(myCrossPartitionReferencesDetectedInterceptor); + + myTransactionSvc.setTransactionPropagationWhenChangingPartitions(Propagation.REQUIRES_NEW); + } + + @AfterEach + public void after() { + myPartitionSettings.setPartitioningEnabled(new PartitionSettings().isPartitioningEnabled()); + myPartitionSettings.setAllowReferencesAcrossPartitions(new 
PartitionSettings().getAllowReferencesAcrossPartitions()); + myPartitionSettings.setUnnamedPartitionMode(new PartitionSettings().isUnnamedPartitionMode()); + myPartitionSettings.setAlwaysOpenNewTransactionForDifferentPartition(new PartitionSettings().isAlwaysOpenNewTransactionForDifferentPartition()); + + myInterceptorRegistry.unregisterInterceptorsIf(t -> t instanceof MyPartitionSelectorInterceptor); + myInterceptorRegistry.unregisterInterceptorsIf(t -> t instanceof ICrossPartitionReferenceDetectedHandler); + + myTransactionSvc.setTransactionPropagationWhenChangingPartitions(Propagation.REQUIRED); + } + + @Test + public void testSamePartitionReference_Create() { + // Setup + Patient p1 = new Patient(); + p1.setActive(true); + IIdType patient1Id = myPatientDao.create(p1, mySrd).getId().toUnqualifiedVersionless(); + + initializeCrossReferencesInterceptor(); + + myCaptureQueriesListener.clear(); + Patient p2 = new Patient(); + p2.setActive(true); + p2.addLink().setOther(new Reference(patient1Id)); + + // Test + myCaptureQueriesListener.clear(); + IIdType patient2Id = myPatientDao.create(p2, mySrd).getId().toUnqualifiedVersionless(); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertEquals(1, myCaptureQueriesListener.countCommits()); + assertEquals(0, myCaptureQueriesListener.countRollbacks()); + + SearchParameterMap params = SearchParameterMap + .newSynchronous(Constants.PARAM_ID, new TokenParam(patient2Id.getValue())) + .addInclude(Patient.INCLUDE_LINK); + IBundleProvider search = myPatientDao.search(params, mySrd); + assertThat(toUnqualifiedVersionlessIdValues(search), contains(patient2Id.getValue(), patient1Id.getValue())); + assertEquals(2, search.getAllResources().size()); + search.getAllResources().forEach(p -> assertTrue(((Patient) p).getActive())); + } + + @Test + public void testSamePartitionReference_SearchWithInclude() { + // Setup + Patient p1 = new Patient(); + p1.setActive(true); + IIdType patient1Id = myPatientDao.create(p1, mySrd).getId().toUnqualifiedVersionless(); + + initializeCrossReferencesInterceptor(); + + myCaptureQueriesListener.clear(); + Patient p2 = new Patient(); + p2.setActive(true); + p2.addLink().setOther(new Reference(patient1Id)); + IIdType patient2Id = myPatientDao.create(p2, mySrd).getId().toUnqualifiedVersionless(); + + // Test + SearchParameterMap params = SearchParameterMap + .newSynchronous(Constants.PARAM_ID, new TokenParam(patient2Id.getValue())) + .addInclude(Patient.INCLUDE_LINK); + IBundleProvider search = myPatientDao.search(params, mySrd); + + // Verify + assertThat(toUnqualifiedVersionlessIdValues(search), contains(patient2Id.getValue(), patient1Id.getValue())); + assertEquals(2, search.getAllResources().size()); + search.getAllResources().forEach(p -> assertTrue(((Patient) p).getActive())); + } + + @Test + public void testCrossPartitionReference_Create() { + + // Setup + Patient p = new Patient(); + p.setActive(true); + IIdType patientId = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless(); + ourLog.info("Patient ID: {}", patientId); + runInTransaction(() -> { + ResourceTable resourceTable = myResourceTableDao.findById(patientId.getIdPartAsLong()).orElseThrow(); + assertEquals(1, resourceTable.getPartitionId().getPartitionId()); + }); + + initializeCrossReferencesInterceptor(); + + // Test + + Observation o = new Observation(); + o.setStatus(Enumerations.ObservationStatus.FINAL); + o.setSubject(new Reference(patientId)); + myCaptureQueriesListener.clear(); + IIdType observationId = myObservationDao.create(o, 
mySrd).getId().toUnqualifiedVersionless(); + + // Verify + assertEquals(2, myCaptureQueriesListener.countCommits()); + assertEquals(0, myCaptureQueriesListener.countRollbacks()); + + runInTransaction(() -> { + ResourceTable resourceTable = myResourceTableDao.findById(observationId.getIdPartAsLong()).orElseThrow(); + assertEquals(2, resourceTable.getPartitionId().getPartitionId()); + }); + + verify(myCrossPartitionReferencesDetectedInterceptor, times(1)).handle(eq(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE), myCrossPartitionReferenceDetailsCaptor.capture()); + CrossPartitionReferenceDetails referenceDetails = myCrossPartitionReferenceDetailsCaptor.getValue(); + assertEquals(PARTITION_OBSERVATION, referenceDetails.getSourceResourcePartitionId()); + assertEquals(patientId.getValue(), referenceDetails.getPathAndRef().getRef().getReferenceElement().getValue()); + } + + private void initializeCrossReferencesInterceptor() { + when(myCrossPartitionReferencesDetectedInterceptor.handle(any(),any())).thenAnswer(t->{ + CrossPartitionReferenceDetails theDetails = t.getArgument(1, CrossPartitionReferenceDetails.class); + IIdType targetId = theDetails.getPathAndRef().getRef().getReferenceElement(); + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forRead(targetId); + RequestPartitionId referenceTargetPartition = myPartitionHelperSvc.determineReadPartitionForRequest(theDetails.getRequestDetails(), details); + + IResourceLookup targetResource = myTransactionService + .withRequest(theDetails.getRequestDetails()) + .withTransactionDetails(theDetails.getTransactionDetails()) + .withRequestPartitionId(referenceTargetPartition) + .execute(() -> myResourceLinkResolver.findTargetResource(referenceTargetPartition, theDetails.getSourceResourceName(), theDetails.getPathAndRef(), theDetails.getRequestDetails(), theDetails.getTransactionDetails())); + + return targetResource; + }); + } + + @Interceptor + public interface ICrossPartitionReferenceDetectedHandler { + + @Hook(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE) + IResourceLookup handle(Pointcut thePointcut, CrossPartitionReferenceDetails theDetails); + + } + + + public class MyPartitionSelectorInterceptor { + + @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE) + public RequestPartitionId selectPartitionCreate(IBaseResource theResource) { + String resourceType = myFhirContext.getResourceType(theResource); + return selectPartition(resourceType); + } + + @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ) + public RequestPartitionId selectPartitionRead(ReadPartitionIdRequestDetails theReadPartitionIdRequestDetails) { + String resourceType = theReadPartitionIdRequestDetails.getResourceType(); + return selectPartition(resourceType); + } + + @Nonnull + private static RequestPartitionId selectPartition(String resourceType) { + switch (resourceType) { + case "Patient": + return PARTITION_PATIENT; + case "Observation": + return PARTITION_OBSERVATION; + default: + throw new InternalErrorException("Don't know how to handle resource type"); + } + } + + } + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java new file mode 100644 index 00000000000..173f1899a33 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java @@ -0,0 +1,86 @@ +package ca.uhn.fhir.jpa.dao.r5; + +import 
ca.uhn.fhir.jpa.esr.ExternallyStoredResourceAddress; +import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceAddressMetadataKey; +import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry; +import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService; +import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; +import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r5.model.Patient; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.springframework.beans.factory.annotation.Autowired; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; + +public class ExternallyStoredResourceR5Test extends BaseJpaR5Test { + + public static final String MY_PROVIDER_ID = "my-provider-id"; + public static final String ADDRESS_123 = "address_123"; + @Autowired + private ExternallyStoredResourceServiceRegistry myProviderRegistry; + + @Mock + private IExternallyStoredResourceService myProvider; + + @BeforeEach + public void beforeEach() { + when(myProvider.getId()).thenReturn(MY_PROVIDER_ID); + myProviderRegistry.registerProvider(myProvider); + } + + @AfterEach + public void afterEach() { + myProviderRegistry.clearProviders(); + } + + @Test + public void testCreate() { + // Test + IIdType id = storePatientWithExternalAddress(); + + // Verify + runInTransaction(()->{ + ResourceTable resource = myResourceTableDao.getReferenceById(id.getIdPartAsLong()); + assertNotNull(resource); + ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L); + assertNotNull(history); + assertEquals(ResourceEncodingEnum.ESR, history.getEncoding()); + assertEquals(MY_PROVIDER_ID + ":" + ADDRESS_123, history.getResourceTextVc()); + }); + + } + + + @Test + public void testRead() { + // Setup + IIdType id = storePatientWithExternalAddress(); + Patient patient = new Patient(); + patient.setActive(true); + when(myProvider.fetchResource(any())).thenReturn(patient); + + // Test + Patient fetchedPatient = myPatientDao.read(id, mySrd); + assertTrue(fetchedPatient.getActive()); + + } + + private IIdType storePatientWithExternalAddress() { + Patient p = new Patient(); + ExternallyStoredResourceAddress address = new ExternallyStoredResourceAddress(MY_PROVIDER_ID, ADDRESS_123); + ExternallyStoredResourceAddressMetadataKey.INSTANCE.put(p, address); + IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless(); + return id; + } + + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java index ac43734dcc6..464e597f563 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java @@ -152,7 +152,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { default: throw new IllegalArgumentException("HTTP verb is not supported: " + theVerb); } - 
myExtensionalVsIdOnResourceTable = myValueSetDao.readEntity(myExtensionalVsId, null).getId(); + myExtensionalVsIdOnResourceTable = (Long) myValueSetDao.readEntity(myExtensionalVsId, null).getPersistentId().getId(); } private CodeSystem createExternalCs() { diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java index 72427a4d46b..b99f702ce22 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java @@ -102,7 +102,7 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide myExtensionalCsId_v1 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless(); } }); - myCodeSystemDao.readEntity(myExtensionalCsId_v1, null).getId(); + myCodeSystemDao.readEntity(myExtensionalCsId_v1, null); theCodeSystem.setId("CodeSystem/cs2"); theCodeSystem.setVersion("2"); @@ -115,7 +115,7 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide myExtensionalCsId_v2 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless(); } }); - myCodeSystemDao.readEntity(myExtensionalCsId_v2, null).getId(); + myCodeSystemDao.readEntity(myExtensionalCsId_v2, null); } @@ -125,13 +125,13 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide valueSet.setId("ValueSet/vs1"); valueSet.getCompose().getInclude().get(0).setVersion("1"); myExtensionalVsId_v1 = persistSingleValueSet(valueSet, HttpVerb.POST); - myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getId(); + myExtensionalVsIdOnResourceTable_v1 = (Long) myValueSetDao.readEntity(myExtensionalVsId_v1, null).getPersistentId().getId(); valueSet.setVersion("2"); valueSet.setId("ValueSet/vs2"); valueSet.getCompose().getInclude().get(0).setVersion("2"); myExtensionalVsId_v2 = persistSingleValueSet(valueSet, HttpVerb.POST); - myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getId(); + myExtensionalVsIdOnResourceTable_v2 = (Long) myValueSetDao.readEntity(myExtensionalVsId_v2, null).getPersistentId().getId(); } diff --git a/hapi-fhir-jpaserver-test-r5/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-test-r5/src/test/resources/logback-test.xml new file mode 100644 index 00000000000..dcada7fec76 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/resources/logback-test.xml @@ -0,0 +1,46 @@ + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index 7c954543cce..46a987dda60 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java index a351e7f4dea..2eaf9a4c56f 100644 --- 
a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java @@ -115,8 +115,11 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { ourRestServer.getInterceptorService().unregisterAllInterceptors(); } + @Override @BeforeEach public void before() throws Exception { + super.before(); + myFhirContext.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER); myFhirContext.getRestfulClientFactory().setSocketTimeout(1200 * 1000); myFhirContext.setParserErrorHandler(new StrictErrorHandler()); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java index 1d51a1709d6..54002cf35b7 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java @@ -73,6 +73,7 @@ import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry; import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; @@ -778,7 +779,7 @@ public abstract class BaseJpaTest extends BaseTest { for (int count = 0; ; count++) { try { - theSystemDao.expunge(new ExpungeOptions().setExpungeEverything(true), null); + theSystemDao.expunge(new ExpungeOptions().setExpungeEverything(true), new SystemRequestDetails()); break; } catch (Exception e) { if (count >= 3) { diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java index fb32c171f0d..a01fc128d70 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java @@ -158,6 +158,7 @@ public class TestR5Config { .afterQuery(captureQueriesListener()) .afterQuery(new CurrentThreadCaptureQueriesListener()) .countQuery(singleQueryCountHolder()) + .afterMethod(captureQueriesListener()) .build(); return dataSource; diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index a1af87d765d..f927e80ba6f 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 94fb2076ff4..b6c300cad9a 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java index 6ba5cbafcce..1176557e0e1 100644 --- 
a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java @@ -22,6 +22,7 @@ package ca.uhn.fhir.mdm.provider; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.mdm.api.IMdmMatchFinderSvc; @@ -137,7 +138,8 @@ public class MdmControllerHelper { * Helper method which will return a bundle of all Matches and Possible Matches. */ public IBaseBundle getMatchesAndPossibleMatchesForResource(IAnyResource theResource, String theResourceType, RequestDetails theRequestDetails) { - RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, theResourceType, null); + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forSearchType(theResourceType, null, null); + RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); List matches = myMdmMatchFinderSvc.getMatchedTargets(theResourceType, theResource, requestPartitionId); matches.sort(Comparator.comparing((MatchedTarget m) -> m.getMatchResult().getNormalizedScore()).reversed()); diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml index 36a2bc32d24..d31364d9923 100644 --- a/hapi-fhir-server-openapi/pom.xml +++ b/hapi-fhir-server-openapi/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index e1622f46811..89a0bf2f33a 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/CompositeInterceptorBroadcaster.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/CompositeInterceptorBroadcaster.java index db05162dc6b..239a3285d37 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/CompositeInterceptorBroadcaster.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/CompositeInterceptorBroadcaster.java @@ -52,6 +52,7 @@ public class CompositeInterceptorBroadcaster { return newCompositeBroadcaster(theInterceptorBroadcaster, theRequestDetails).callHooksAndReturnObject(thePointcut, theParams); } + // TODO: JA - Refactor to make thePointcut the last argument in order to be consistent with the other methods here public static boolean hasHooks(Pointcut thePointcut, IInterceptorBroadcaster theInterceptorBroadcaster, RequestDetails theRequestDetails) { return newCompositeBroadcaster(theInterceptorBroadcaster, theRequestDetails).hasHooks(thePointcut); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java index d11bbe88130..8482b3e0380 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java @@ -22,6 +22,10 @@ package ca.uhn.fhir.rest.server.util; public interface ICachedSearchDetails { + String getUuid(); + 
+ void setUuid(String theUuid); + /** * Mark the search as being non-reusable by future queries (as in, the search * can serve multiple page requests for pages of the same search, but it diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml index 53b68b9f641..9787468a29d 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml index c296d73aa17..750b50229fa 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../pom.xml @@ -20,7 +20,7 @@ ca.uhn.hapi.fhir hapi-fhir-caching-api - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT com.github.ben-manes.caffeine diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml index d28ef13b6d6..c561f4614c7 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml index 1fde459eddd..ed7be6107a6 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml @@ -7,7 +7,7 @@ hapi-fhir ca.uhn.hapi.fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../../pom.xml diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml index d4ffcdf51bb..36642d9c173 100644 --- a/hapi-fhir-serviceloaders/pom.xml +++ b/hapi-fhir-serviceloaders/pom.xml @@ -5,7 +5,7 @@ hapi-deployable-pom ca.uhn.hapi.fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 76585461d6f..16dca9e97e4 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index 7d5f78acc23..8217d41d0b1 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index 0c372797448..ccd488446cf 100644 --- 
a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT hapi-fhir-spring-boot-sample-client-okhttp diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index d9e847f3d13..2787d3eac70 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT hapi-fhir-spring-boot-sample-server-jersey diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index df6ea42bfc4..1ebbbffc63d 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT hapi-fhir-spring-boot-samples diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index e8ebd8e529e..5c845310bfb 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index 85b7fb43578..01821fdf59d 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml index 4c7b6206aec..00c24155a28 100644 --- a/hapi-fhir-sql-migrate/pom.xml +++ b/hapi-fhir-sql-migrate/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml index 07fba5d71e1..d1b639ea92a 100644 --- a/hapi-fhir-storage-batch2-jobs/pom.xml +++ b/hapi-fhir-storage-batch2-jobs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml 4.0.0 diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java index e2cec69a4b5..4da4bf751f7 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java @@ -34,17 +34,17 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; -import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import 
ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter; import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; - import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; + import java.util.List; import static ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE; @@ -91,9 +91,9 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter .forEach(deleteExpungeJobParameters::addPartitionedUrl); deleteExpungeJobParameters.setBatchSize(theBatchSize); - ReadPartitionIdRequestDetails details = new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, null, null, null); + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_DELETE_EXPUNGE); // Also set toplevel partition in case there are no urls - RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, details); + RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); deleteExpungeJobParameters.setRequestPartitionId(requestPartition); JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java index 930dbeba4e2..dbafc5733d4 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java @@ -30,7 +30,6 @@ import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; -import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.provider.ProviderConstants; import ca.uhn.fhir.util.ParametersUtil; @@ -72,8 +71,8 @@ public class ReindexProvider { .forEach(params::addPartitionedUrl); } - ReadPartitionIdRequestDetails details= new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, null, null, null); - RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, details); + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_REINDEX); + RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); params.setRequestPartitionId(requestPartition); JobInstanceStartRequest request = new JobInstanceStartRequest(); diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProviderTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProviderTest.java index 
c69ed89db66..9c97348815a 100644 --- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProviderTest.java +++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProviderTest.java @@ -69,7 +69,7 @@ public class ReindexProviderTest { when(myJobCoordinator.startInstance(any())) .thenReturn(createJobStartResponse()); - when(myRequestPartitionHelperSvc.determineReadPartitionForRequest(any(), any(), any())).thenReturn(RequestPartitionId.allPartitions()); + when(myRequestPartitionHelperSvc.determineReadPartitionForRequest(any(), any())).thenReturn(RequestPartitionId.allPartitions()); } private Batch2JobStartResponse createJobStartResponse() { diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml index 0db68563665..5ef04eac996 100644 --- a/hapi-fhir-storage-batch2/pom.xml +++ b/hapi-fhir-storage-batch2/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml index 30a893b23a3..600778ae0d1 100644 --- a/hapi-fhir-storage-cr/pom.xml +++ b/hapi-fhir-storage-cr/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-cr/src/test/resources/logback-test.xml b/hapi-fhir-storage-cr/src/test/resources/logback-test.xml new file mode 100644 index 00000000000..e40a4e73953 --- /dev/null +++ b/hapi-fhir-storage-cr/src/test/resources/logback-test.xml @@ -0,0 +1,15 @@ + + + + INFO + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n + + + + + + + + diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml index 7163977bad8..0bc37154487 100644 --- a/hapi-fhir-storage-mdm/pom.xml +++ b/hapi-fhir-storage-mdm/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml 4.0.0 diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml index ea4f218dd6f..7d538cc8b00 100644 --- a/hapi-fhir-storage-test-utilities/pom.xml +++ b/hapi-fhir-storage-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml 4.0.0 diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml index e3fef4e756d..c059d3505e4 100644 --- a/hapi-fhir-storage/pom.xml +++ b/hapi-fhir-storage/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -106,6 +106,11 @@ + + org.springframework + spring-tx + + com.fasterxml.jackson.core jackson-annotations @@ -142,20 +147,20 @@ spring-test test - - com.github.dnault - xml-patch - - - io.dogote - json-patch - - - org.springframework.data - spring-data-commons - + + com.github.dnault + xml-patch + + + io.dogote + json-patch + + + org.springframework.data + spring-data-commons + - + diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java similarity index 96% rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java index 8a718e69d61..e55161fc900 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java +++ 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.api; /*- * #%L - * HAPI FHIR JPA Model + * HAPI FHIR Storage api * %% * Copyright (C) 2014 - 2023 Smile CDR, Inc. * %% diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java index 600835e2ef2..0995e2c5b04 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java @@ -27,8 +27,7 @@ import ca.uhn.fhir.jpa.api.model.DeleteConflictList; import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; -import ca.uhn.fhir.jpa.model.entity.BaseHasResource; -import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.jpa.model.entity.TagTypeEnum; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.model.api.IQueryParameterType; @@ -188,16 +187,17 @@ public interface IFhirResourceDao extends IDao { /** * Read a resource by its internal PID + * * @throws ResourceNotFoundException If the ID is not known to the server - * @throws ResourceGoneException If the resource has been deleted + * @throws ResourceGoneException If the resource has been deleted */ T readByPid(IResourcePersistentId thePid); /** * Read a resource by its internal PID - * @throws ResourceNotFoundException If the ID is not known to the server - * @throws ResourceGoneException If the resource has been deleted and theDeletedOk is true * + * @throws ResourceNotFoundException If the ID is not known to the server + * @throws ResourceGoneException If the resource has been deleted and theDeletedOk is true */ default T readByPid(IResourcePersistentId thePid, boolean theDeletedOk) { throw new UnsupportedOperationException(Msg.code(571)); @@ -206,31 +206,36 @@ public interface IFhirResourceDao extends IDao { /** * @param theRequestDetails The request details including permissions and partitioning information * @throws ResourceNotFoundException If the ID is not known to the server - * @throws ResourceGoneException If the resource has been deleted + * @throws ResourceGoneException If the resource has been deleted */ T read(IIdType theId, RequestDetails theRequestDetails); /** * Should deleted resources be returned successfully. This should be false for * a normal FHIR read. + * * @throws ResourceNotFoundException If the ID is not known to the server - * @throws ResourceGoneException If the resource has been deleted and theDeletedOk is true + * @throws ResourceGoneException If the resource has been deleted and theDeletedOk is true */ T read(IIdType theId, RequestDetails theRequestDetails, boolean theDeletedOk); - BaseHasResource readEntity(IIdType theId, RequestDetails theRequest); - /** - * @param theCheckForForcedId If true, this method should fail if the requested ID contains a numeric PID which exists, but is - * obscured by a "forced ID" so should not exist as far as the outside world is concerned. + * Read an entity from the database, and return it. Note that here we're talking about whatever the + * native database representation is, not the parsed {@link IBaseResource} instance. 
+ * + * @param theId The resource ID to fetch + * @param theRequest The request details object associated with the request */ - BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId, RequestDetails theRequest); + IBasePersistedResource readEntity(IIdType theId, RequestDetails theRequest); /** * Updates index tables associated with the given resource. Does not create a new * version or update the resource's update time. + * + * @param theResource The FHIR resource object corresponding to the entity to reindex + * @param theEntity The storage entity to reindex */ - void reindex(T theResource, ResourceTable theEntity); + void reindex(T theResource, IBasePersistedResource theEntity); void removeTag(IIdType theId, TagTypeEnum theTagType, String theSystem, String theCode, RequestDetails theRequestDetails); @@ -317,7 +322,7 @@ public interface IFhirResourceDao extends IDao { /** * Delete a list of resource Pids - * + *

    * CAUTION: This list does not throw an exception if there are delete conflicts. It should always be followed by * a call to DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(fhirContext, conflicts); * to actually throw the exception. The reason this method doesn't do that itself is that it is expected to be diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java index c23d2029b2c..e7b0ab1e0e3 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.api.model; * #L% */ -import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.rest.api.MethodOutcome; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; @@ -32,7 +32,7 @@ import java.util.List; */ public class DeleteMethodOutcome extends MethodOutcome { - private List myDeletedEntities; + private List myDeletedEntities; @Deprecated private long myExpungedResourcesCount; @Deprecated @@ -45,21 +45,21 @@ public class DeleteMethodOutcome extends MethodOutcome { super(theBaseOperationOutcome); } - public List getDeletedEntities() { + public List getDeletedEntities() { return myDeletedEntities; } /** - * Use {@link ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter#ENTITY_TOTAL_UPDATED_OR_DELETED} + * Use {@literal ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter#ENTITY_TOTAL_UPDATED_OR_DELETED} */ @Deprecated - public DeleteMethodOutcome setDeletedEntities(List theDeletedEntities) { + public DeleteMethodOutcome setDeletedEntities(List theDeletedEntities) { myDeletedEntities = theDeletedEntities; return this; } /** - * Use {@link ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener#RESOURCE_TOTAL_PROCESSED} + * Use {@literal ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener#RESOURCE_TOTAL_PROCESSED} */ @Deprecated public long getExpungedResourcesCount() { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java index 8c7daddf474..6fa0e69aaa0 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java @@ -36,7 +36,7 @@ public interface ISearchCoordinatorSvc { void cancelAllActiveSearches(); - List getResources(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails); + List getResources(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); IBundleProvider registerSearch(IFhirResourceDao theCallingDao, SearchParameterMap theParams, String theResourceType, CacheControlDirective theCacheControlDirective, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java index ec4bc5ed248..7c99f470d00 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java @@ -978,8 +978,8 @@ public abstract class 
BaseTransactionProcessor { matchUrl = performIdSubstitutionsInMatchUrl(theIdSubstitutions, matchUrl); DeleteMethodOutcome deleteOutcome = dao.deleteByUrl(matchUrl, deleteConflicts, theRequest); setConditionalUrlToBeValidatedLater(conditionalUrlToIdMap, matchUrl, deleteOutcome.getId()); - List allDeleted = deleteOutcome.getDeletedEntities(); - for (ResourceTable deleted : allDeleted) { + List allDeleted = deleteOutcome.getDeletedEntities(); + for (IBasePersistedResource deleted : allDeleted) { deletedResources.add(deleted.getIdDt().toUnqualifiedVersionless().getValueAsString()); } if (allDeleted.isEmpty()) { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java index 1ac6a1c6bed..f7101250b4e 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java @@ -27,6 +27,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy; import ca.uhn.fhir.jpa.dao.DaoFailureUtil; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; @@ -38,24 +39,24 @@ import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import ca.uhn.fhir.util.ICallable; import ca.uhn.fhir.util.TestUtil; import com.google.common.annotations.VisibleForTesting; +import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.orm.jpa.JpaDialect; -import org.springframework.orm.jpa.JpaTransactionManager; -import org.springframework.orm.jpa.vendor.HibernateJpaDialect; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.annotation.Isolation; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionCallbackWithoutResult; +import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.transaction.support.TransactionTemplate; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import java.util.Objects; import java.util.concurrent.Callable; /** @@ -73,7 +74,9 @@ public class HapiTransactionService implements IHapiTransactionService { protected PlatformTransactionManager myTransactionManager; @Autowired protected IRequestPartitionHelperSvc myRequestPartitionHelperSvc; - private volatile Boolean myCustomIsolationSupported; + @Autowired + protected PartitionSettings myPartitionSettings; + private Propagation myTransactionPropagationWhenChangingPartitions = Propagation.REQUIRED; @VisibleForTesting public void setInterceptorBroadcaster(IInterceptorBroadcaster theInterceptorBroadcaster) { @@ -156,15 +159,7 @@ public class HapiTransactionService implements IHapiTransactionService { } public boolean isCustomIsolationSupported() { - if 
(myCustomIsolationSupported == null) { - if (myTransactionManager instanceof JpaTransactionManager) { - JpaDialect jpaDialect = ((JpaTransactionManager) myTransactionManager).getJpaDialect(); - myCustomIsolationSupported = (jpaDialect instanceof HibernateJpaDialect); - } else { - myCustomIsolationSupported = false; - } - } - return myCustomIsolationSupported; + return false; } @VisibleForTesting @@ -197,6 +192,30 @@ public class HapiTransactionService implements IHapiTransactionService { ourRequestPartitionThreadLocal.set(requestPartitionId); } + if (Objects.equals(previousRequestPartitionId, requestPartitionId)) { + if (canReuseExistingTransaction(theExecutionBuilder)) { + /* + * If we're already in an active transaction, and it's for the right partition, + * and it's not a read-only transaction, we don't need to open a new transaction + * so let's just add a method to the stack trace that makes this obvious. + */ + return executeInExistingTransaction(theCallback); + } + } else if (myTransactionPropagationWhenChangingPartitions == Propagation.REQUIRES_NEW) { + return executeInNewTransactionForPartitionChange(theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId); + } + + return doExecuteInTransaction(theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId); + } + + @Nullable + private T executeInNewTransactionForPartitionChange(ExecutionBuilder theExecutionBuilder, TransactionCallback theCallback, RequestPartitionId requestPartitionId, RequestPartitionId previousRequestPartitionId) { + theExecutionBuilder.myPropagation = myTransactionPropagationWhenChangingPartitions; + return doExecuteInTransaction(theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId); + } + + @Nullable + private T doExecuteInTransaction(ExecutionBuilder theExecutionBuilder, TransactionCallback theCallback, RequestPartitionId requestPartitionId, RequestPartitionId previousRequestPartitionId) { try { for (int i = 0; ; i++) { try { @@ -270,6 +289,11 @@ public class HapiTransactionService implements IHapiTransactionService { } } + public void setTransactionPropagationWhenChangingPartitions(Propagation theTransactionPropagationWhenChangingPartitions) { + Validate.notNull(theTransactionPropagationWhenChangingPartitions); + myTransactionPropagationWhenChangingPartitions = theTransactionPropagationWhenChangingPartitions; + } + @Nullable protected T doExecuteCallback(ExecutionBuilder theExecutionBuilder, TransactionCallback theCallback) { try { @@ -387,6 +411,22 @@ public class HapiTransactionService implements IHapiTransactionService { } } + /** + * Returns true if we already have an active transaction associated with the current thread, AND + * either it's non-read-only or we only need a read-only transaction, AND + * the newly requested transaction has a propagation of REQUIRED + */ + private static boolean canReuseExistingTransaction(ExecutionBuilder theExecutionBuilder) { + return TransactionSynchronizationManager.isActualTransactionActive() + && (!TransactionSynchronizationManager.isCurrentTransactionReadOnly() || theExecutionBuilder.myReadOnly) + && (theExecutionBuilder.myPropagation == null || theExecutionBuilder.myPropagation == Propagation.REQUIRED); + } + + @Nullable + private static T executeInExistingTransaction(TransactionCallback theCallback) { + return theCallback.doInTransaction(null); + } + /** * Invokes {@link Callable#call()} and rethrows any exceptions thrown by that method. 
* If the exception extends {@link BaseServerResponseException} it is rethrown unmodified. @@ -415,4 +455,18 @@ public class HapiTransactionService implements IHapiTransactionService { public static RequestPartitionId getRequestPartitionAssociatedWithThread() { return ourRequestPartitionThreadLocal.get(); } + + /** + * Throws an {@link IllegalArgumentException} if a transaction is active + */ + public static void noTransactionAllowed() { + Validate.isTrue(!TransactionSynchronizationManager.isActualTransactionActive(), "Transaction must not be active but found an active transaction"); + } + + /** + * Throws an {@link IllegalArgumentException} if no transaction is active + */ + public static void requireTransaction() { + Validate.isTrue(TransactionSynchronizationManager.isActualTransactionActive(), "Transaction required here but no active transaction found"); + } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java index 8bbe8b979ec..d9391a4d7e7 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java @@ -29,6 +29,7 @@ import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor; import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor; @@ -65,6 +66,9 @@ public class PatientIdPartitionInterceptor { @Autowired private ISearchParamExtractor mySearchParamExtractor; + @Autowired + private PartitionSettings myPartitionSettings; + /** * Constructor */ @@ -75,10 +79,11 @@ public class PatientIdPartitionInterceptor { /** * Constructor */ - public PatientIdPartitionInterceptor(FhirContext theFhirContext, ISearchParamExtractor theSearchParamExtractor) { + public PatientIdPartitionInterceptor(FhirContext theFhirContext, ISearchParamExtractor theSearchParamExtractor, PartitionSettings thePartitionSettings) { this(); myFhirContext = theFhirContext; mySearchParamExtractor = theSearchParamExtractor; + myPartitionSettings = thePartitionSettings; } @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE) @@ -121,6 +126,9 @@ public class PatientIdPartitionInterceptor { @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ) public RequestPartitionId identifyForRead(ReadPartitionIdRequestDetails theReadDetails, RequestDetails theRequestDetails) { + if (isBlank(theReadDetails.getResourceType())) { + return provideNonCompartmentMemberTypeResponse(null); + } RuntimeResourceDefinition resourceDef = myFhirContext.getResourceDefinition(theReadDetails.getResourceType()); List compartmentSps = getCompartmentSearchParams(resourceDef); if (compartmentSps.isEmpty()) { @@ -259,7 +267,7 @@ public class PatientIdPartitionInterceptor { @SuppressWarnings("unused") @Nonnull protected RequestPartitionId provideNonCompartmentMemberTypeResponse(IBaseResource theResource) { - return RequestPartitionId.defaultPartition(); + return RequestPartitionId.fromPartitionId(myPartitionSettings.getDefaultPartitionId()); } diff --git 
a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java similarity index 99% rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java index 33e3846b826..4a7a58f5016 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.search; /*- * #%L - * HAPI FHIR JPA Model + * HAPI FHIR Storage api * %% * Copyright (C) 2014 - 2023 Smile CDR, Inc. * %% diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java similarity index 98% rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java index 152e72ed206..5d5be1f3f5d 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.search; /*- * #%L - * HAPI FHIR JPA Model + * HAPI FHIR Storage api * %% * Copyright (C) 2014 - 2023 Smile CDR, Inc. * %% diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java index fec84bc0078..34c84c12f65 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java @@ -29,7 +29,6 @@ import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; @@ -47,7 +46,6 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME; import static ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster.doCallHooks; import static ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster.doCallHooksAndReturnObject; import static ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster.hasHooks; @@ -94,39 +92,44 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition */ @Nonnull @Override - public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType, ReadPartitionIdRequestDetails theDetails) { + public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails) { RequestPartitionId requestPartitionId; - boolean nonPartitionableResource = !isResourcePartitionable(theResourceType); + String resourceType = theDetails != null ? 
theDetails.getResourceType() : null; + boolean nonPartitionableResource = !isResourcePartitionable(resourceType); if (myPartitionSettings.isPartitioningEnabled()) { - // Handle system requests + + RequestDetails requestDetails = theRequest; //TODO GGG eventually, theRequest will not be allowed to be null here, and we will pass through SystemRequestDetails instead. - if ((theRequest == null || theRequest instanceof SystemRequestDetails) && nonPartitionableResource) { - return RequestPartitionId.defaultPartition(); + if (requestDetails == null) { + requestDetails = new SystemRequestDetails(); } - if (theRequest instanceof SystemRequestDetails && systemRequestHasExplicitPartition((SystemRequestDetails) theRequest)) { - requestPartitionId = getSystemRequestPartitionId((SystemRequestDetails) theRequest, nonPartitionableResource); - } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, myInterceptorBroadcaster, theRequest)) { + // Handle system requests + if (requestDetails instanceof SystemRequestDetails && systemRequestHasExplicitPartition((SystemRequestDetails) requestDetails) && !nonPartitionableResource) { + requestPartitionId = getSystemRequestPartitionId((SystemRequestDetails) requestDetails, nonPartitionableResource); + } else if ((requestDetails instanceof SystemRequestDetails) && nonPartitionableResource) { + return RequestPartitionId.fromPartitionId(myPartitionSettings.getDefaultPartitionId()); + } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, myInterceptorBroadcaster, requestDetails)) { // Interceptor call: STORAGE_PARTITION_IDENTIFY_ANY HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, params); - } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) { + .add(RequestDetails.class, requestDetails) + .addIfMatchesType(ServletRequestDetails.class, requestDetails); + requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, requestDetails, Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, params); + } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, requestDetails)) { // Interceptor call: STORAGE_PARTITION_IDENTIFY_READ HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(RequestDetails.class, requestDetails) + .addIfMatchesType(ServletRequestDetails.class, requestDetails) .add(ReadPartitionIdRequestDetails.class, theDetails); - requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_READ, params); + requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, requestDetails, Pointcut.STORAGE_PARTITION_IDENTIFY_READ, params); } else { requestPartitionId = null; } validateRequestPartitionNotNull(requestPartitionId, Pointcut.STORAGE_PARTITION_IDENTIFY_READ); - return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest, theResourceType); + return validateNormalizeAndNotifyHooksForRead(requestPartitionId, requestDetails, resourceType); } return RequestPartitionId.allPartitions(); @@ -242,16 +245,6 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition return 
theRequest.getRequestPartitionId() != null || theRequest.getTenantId() != null; } - @Nonnull - @Override - public PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId) { - Integer partitionId = theRequestPartitionId.getFirstPartitionIdOrNull(); - if (partitionId == null) { - partitionId = myPartitionSettings.getDefaultPartitionId(); - } - return new PartitionablePartitionId(partitionId, theRequestPartitionId.getPartitionDate()); - } - @Nonnull @Override public Set toReadPartitions(@Nonnull RequestPartitionId theRequestPartitionId) { diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java similarity index 93% rename from hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java index 8798db74605..883a4c92824 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util; /*- * #%L - * hapi-fhir-jpa + * HAPI FHIR Storage api * %% * Copyright (C) 2014 - 2023 Smile CDR, Inc. * %% @@ -20,6 +20,8 @@ package ca.uhn.fhir.jpa.util; * #L% */ +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import net.ttddyy.dsproxy.ExecutionInfo; import net.ttddyy.dsproxy.QueryInfo; import net.ttddyy.dsproxy.listener.MethodExecutionContext; @@ -60,6 +62,8 @@ public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuild return; } + RequestPartitionId requestPartitionId = HapiTransactionService.getRequestPartitionAssociatedWithThread(); + /* * Note on how this works: * @@ -98,7 +102,7 @@ public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuild long elapsedTime = theExecutionInfo.getElapsedTime(); long startTime = System.currentTimeMillis() - elapsedTime; - SqlQuery sqlQuery = new SqlQuery(sql, params, startTime, elapsedTime, stackTraceElements, size); + SqlQuery sqlQuery = new SqlQuery(sql, params, startTime, elapsedTime, stackTraceElements, size, requestPartitionId); queryList.add(sqlQuery); } } diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java similarity index 98% rename from hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java index b41dc31d3fe..1bdfb2cab27 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util; /*- * #%L - * hapi-fhir-jpa + * HAPI FHIR Storage api * %% * Copyright (C) 2014 - 2023 Smile CDR, Inc. 
* %% @@ -426,6 +426,10 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe StringBuilder b = new StringBuilder(); b.append("SqlQuery at "); b.append(new InstantType(new Date(theQuery.getQueryTimestamp())).getValueAsString()); + if (theQuery.getRequestPartitionId() != null && theQuery.getRequestPartitionId().hasPartitionIds()) { + b.append(" on partition "); + b.append(theQuery.getRequestPartitionId().getPartitionIds()); + } b.append(" took ").append(StopWatch.formatMillis(theQuery.getElapsedTime())); b.append(" on Thread: ").append(theQuery.getThreadName()); if (theQuery.getSize() > 1) { diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java similarity index 99% rename from hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java index 9bee7dd8d49..ba32d6643ea 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util; /*- * #%L - * hapi-fhir-jpa + * HAPI FHIR Storage api * %% * Copyright (C) 2014 - 2023 Smile CDR, Inc. * %% diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java similarity index 88% rename from hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java index 88886196698..334bd9b5e21 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util; /*- * #%L - * hapi-fhir-jpa + * HAPI FHIR Storage api * %% * Copyright (C) 2014 - 2023 Smile CDR, Inc. 
* %% @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.util; * #L% */ +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.util.UrlUtil; import org.apache.commons.lang3.Validate; import org.hibernate.engine.jdbc.internal.BasicFormatterImpl; @@ -40,12 +41,13 @@ public class SqlQuery { private final int mySize; private final LanguageEnum myLanguage; private final String myNamespace; + private final RequestPartitionId myRequestPartitionId; - public SqlQuery(String theSql, List theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize) { - this(null, theSql, theParams, theQueryTimestamp, theElapsedTime, theStackTraceElements, theSize, LanguageEnum.SQL); + public SqlQuery(String theSql, List theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize, RequestPartitionId theRequestPartitionId) { + this(null, theSql, theParams, theQueryTimestamp, theElapsedTime, theStackTraceElements, theSize, LanguageEnum.SQL, theRequestPartitionId); } - public SqlQuery(String theNamespace, String theSql, List theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize, LanguageEnum theLanguage) { + public SqlQuery(String theNamespace, String theSql, List theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize, LanguageEnum theLanguage, RequestPartitionId theRequestPartitionId) { Validate.notNull(theLanguage, "theLanguage must not be null"); myNamespace = theNamespace; @@ -56,6 +58,11 @@ public class SqlQuery { myStackTrace = theStackTraceElements; mySize = theSize; myLanguage = theLanguage; + myRequestPartitionId = theRequestPartitionId; + } + + public RequestPartitionId getRequestPartitionId() { + return myRequestPartitionId; } public String getNamespace() { diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/SqlQueryList.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQueryList.java similarity index 97% rename from hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/SqlQueryList.java rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQueryList.java index 7ba3d0df24f..52740598316 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/SqlQueryList.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQueryList.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.util; /*- * #%L - * hapi-fhir-jpa + * HAPI FHIR Storage api * %% * Copyright (C) 2014 - 2023 Smile CDR, Inc. 
* %% diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index 8d574d26289..70d8a137023 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 95412544583..b14c13f840f 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index 9b4521c92b1..0475d912a4d 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 6.5.2-SNAPSHOT + 6.5.3-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/server/UpdateDstu3Test.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/server/UpdateDstu3Test.java index 9ef90931323..2407952b76c 100644 --- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/server/UpdateDstu3Test.java +++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/server/UpdateDstu3Test.java @@ -8,7 +8,8 @@ import ca.uhn.fhir.rest.annotation.ResourceParam; import ca.uhn.fhir.rest.annotation.Update; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.MethodOutcome; -import ca.uhn.fhir.test.utilities.JettyUtil; +import ca.uhn.fhir.test.utilities.HttpClientExtension; +import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; import ca.uhn.fhir.util.TestUtil; import org.apache.commons.io.IOUtils; import org.apache.http.HttpResponse; @@ -16,23 +17,16 @@ import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.servlet.ServletHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.InstantType; import org.hl7.fhir.dstu3.model.OperationOutcome; import org.hl7.fhir.dstu3.model.Patient; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; import java.nio.charset.StandardCharsets; -import java.util.concurrent.TimeUnit; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; @@ -43,13 +37,15 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; public class UpdateDstu3Test { - private static CloseableHttpClient ourClient; - private static String ourConditionalUrl; - private static FhirContext ourCtx = FhirContext.forDstu3(); - private static IdType ourId; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(UpdateDstu3Test.class); - private static int ourPort; - private static Server ourServer; + @RegisterExtension + private static 
+	private static String ourConditionalUrl;
+	private static final FhirContext ourCtx = FhirContext.forDstu3Cached();
+	@RegisterExtension
+	private static RestfulServerExtension ourServer = new RestfulServerExtension(ourCtx)
+		.registerProvider(new PatientProvider());
+	private static IdType ourId;
 	private static InstantType ourSetLastUpdated;
 
 	@BeforeEach
@@ -67,12 +63,12 @@ public class UpdateDstu3Test {
 		patient.addIdentifier().setValue("002");
 		ourSetLastUpdated = new InstantType("2002-04-22T11:22:33.022Z");
 
-		HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient/123");
+		HttpPut httpPost = new HttpPut(ourServer.getBaseUrl() + "/Patient/123");
 		httpPost.setEntity(new StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8")));
 
 		HttpResponse status = ourClient.execute(httpPost);
 
-		String responseContent = IOUtils.toString(status.getEntity().getContent());
+		String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
 		IOUtils.closeQuietly(status.getEntity().getContent());
 
 		ourLog.info("Response was:\n{}", responseContent);
@@ -85,8 +81,8 @@ public class UpdateDstu3Test {
 		assertEquals(patient.getIdentifier().get(0).getValue(), actualPatient.getIdentifier().get(0).getValue());
 
 		assertEquals(200, status.getStatusLine().getStatusCode());
-		assertEquals(null, status.getFirstHeader("location"));
-		assertEquals("http://localhost:" + ourPort + "/Patient/123/_history/002", status.getFirstHeader("content-location").getValue());
+		assertNull(status.getFirstHeader("location"));
+		assertEquals(ourServer.getBaseUrl() + "/Patient/123/_history/002", status.getFirstHeader("content-location").getValue());
 		assertEquals("W/\"002\"", status.getFirstHeader(Constants.HEADER_ETAG_LC).getValue());
 		assertEquals("Mon, 22 Apr 2002 11:22:33 GMT", status.getFirstHeader(Constants.HEADER_LAST_MODIFIED_LOWERCASE).getValue());
 
@@ -99,7 +95,7 @@ public class UpdateDstu3Test {
 		patient.setId("001");
 		patient.addIdentifier().setValue("002");
 
-		HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient?_id=001");
+		HttpPut httpPost = new HttpPut(ourServer.getBaseUrl() + "/Patient?_id=001");
 		httpPost.setEntity(new StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8")));
 
 		CloseableHttpResponse status = ourClient.execute(httpPost);
@@ -108,8 +104,8 @@ public class UpdateDstu3Test {
 			ourLog.info("Response was:\n{}", responseContent);
 
 			assertEquals(200, status.getStatusLine().getStatusCode());
-			assertEquals("Patient?_id=001",ourConditionalUrl);
-			assertEquals(null, ourId);
+			assertEquals("Patient?_id=001", ourConditionalUrl);
+			assertNull(ourId);
 		} finally {
 			IOUtils.closeQuietly(status.getEntity().getContent());
 		}
@@ -122,7 +118,7 @@ public class UpdateDstu3Test {
 		Patient patient = new Patient();
 		patient.addIdentifier().setValue("002");
 
-		HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient/001");
+		HttpPut httpPost = new HttpPut(ourServer.getBaseUrl() + "/Patient/001");
 		httpPost.setEntity(new StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8")));
 
 		HttpResponse status = ourClient.execute(httpPost);
@@ -133,7 +129,7 @@ public class UpdateDstu3Test {
 		ourLog.info("Response was:\n{}", responseContent);
 
 		assertEquals(400, status.getStatusLine().getStatusCode());
-		
+
 		OperationOutcome oo = ourCtx.newXmlParser().parseResource(OperationOutcome.class, responseContent);
 		assertEquals(Msg.code(419) + "Can not update resource, resource body must contain an ID element for update (PUT) operation", oo.getIssue().get(0).getDiagnostics());
 	}
 
@@ -145,7 +141,7 @@ public class UpdateDstu3Test {
 		patient.setId("001");
 		patient.addIdentifier().setValue("002");
 
-		HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient/001");
+		HttpPut httpPost = new HttpPut(ourServer.getBaseUrl() + "/Patient/001");
 		httpPost.setEntity(new StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8")));
 
 		CloseableHttpResponse status = ourClient.execute(httpPost);
@@ -169,7 +165,7 @@ public class UpdateDstu3Test {
 		patient.setId("Patient/3/_history/4");
 		patient.addIdentifier().setValue("002");
 
-		HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient/1/_history/2");
+		HttpPut httpPost = new HttpPut(ourServer.getBaseUrl() + "/Patient/1/_history/2");
 		httpPost.setEntity(new StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8")));
 
 		HttpResponse status = ourClient.execute(httpPost);
@@ -183,32 +179,6 @@ public class UpdateDstu3Test {
 		assertThat(responseContent, containsString("Resource body ID of "3" does not match"));
 	}
 
-	@AfterAll
-	public static void afterClassClearContext() throws Exception {
-		JettyUtil.closeServer(ourServer);
-		TestUtil.randomizeLocaleAndTimezone();
-	}
-
-	@BeforeAll
-	public static void beforeClass() throws Exception {
-		ourServer = new Server(0);
-
-		ServletHandler proxyHandler = new ServletHandler();
-		RestfulServer servlet = new RestfulServer(ourCtx);
-		servlet.setResourceProviders(new PatientProvider());
-		ServletHolder servletHolder = new ServletHolder(servlet);
-		proxyHandler.addServletWithMapping(servletHolder, "/*");
-		ourServer.setHandler(proxyHandler);
-		JettyUtil.startServer(ourServer);
-		ourPort = JettyUtil.getPortForStartedServer(ourServer);
-
-		PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
-		HttpClientBuilder builder = HttpClientBuilder.create();
-		builder.setConnectionManager(connectionManager);
-		ourClient = builder.build();
-
-	}
-
 	public static class PatientProvider implements IResourceProvider {
 
 		@Override
@@ -235,4 +205,9 @@ public class UpdateDstu3Test {
 		}
 
 	}
+
+	@AfterAll
+	public static void afterClassClearContext() throws Exception {
+		TestUtil.randomizeLocaleAndTimezone();
+	}
 }
diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml
index c24f230bc71..592e64c5259 100644
--- a/hapi-fhir-structures-hl7org-dstu2/pom.xml
+++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml
@@ -5,7 +5,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml
index 2617ccd5ac4..fb8b0898ce9 100644
--- a/hapi-fhir-structures-r4/pom.xml
+++ b/hapi-fhir-structures-r4/pom.xml
@@ -5,7 +5,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml
index 4feb10e4b7a..4d84c17c1a6 100644
--- a/hapi-fhir-structures-r4b/pom.xml
+++ b/hapi-fhir-structures-r4b/pom.xml
@@ -5,7 +5,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml
index cc0465c3c0b..c377d9802b6 100644
--- a/hapi-fhir-structures-r5/pom.xml
+++ b/hapi-fhir-structures-r5/pom.xml
@@ -5,7 +5,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml
index ab366500783..f9343dc2101 100644
--- a/hapi-fhir-test-utilities/pom.xml
+++ b/hapi-fhir-test-utilities/pom.xml
@@ -5,7 +5,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml
index 6cd2289821e..b99445adca4 100644
--- a/hapi-fhir-testpage-overlay/pom.xml
+++ b/hapi-fhir-testpage-overlay/pom.xml
@@ -5,7 +5,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml
index 742e11b93c1..ffb6dbaee48 100644
--- a/hapi-fhir-validation-resources-dstu2.1/pom.xml
+++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml
@@ -4,7 +4,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml
index 4b1d759f233..a31ded4700a 100644
--- a/hapi-fhir-validation-resources-dstu2/pom.xml
+++ b/hapi-fhir-validation-resources-dstu2/pom.xml
@@ -4,7 +4,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml
index 680b4a9699b..ec2ec11cc94 100644
--- a/hapi-fhir-validation-resources-dstu3/pom.xml
+++ b/hapi-fhir-validation-resources-dstu3/pom.xml
@@ -4,7 +4,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml
index cde9cb17ea6..3f799f75831 100644
--- a/hapi-fhir-validation-resources-r4/pom.xml
+++ b/hapi-fhir-validation-resources-r4/pom.xml
@@ -4,7 +4,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml
index 91dc24b6f59..16a6d8a2dc9 100644
--- a/hapi-fhir-validation-resources-r5/pom.xml
+++ b/hapi-fhir-validation-resources-r5/pom.xml
@@ -4,7 +4,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml
index 4debf4152bf..bb04eb0f8a3 100644
--- a/hapi-fhir-validation/pom.xml
+++ b/hapi-fhir-validation/pom.xml
@@ -5,7 +5,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml
index 9b0546f0981..ed420cfd0db 100644
--- a/hapi-tinder-plugin/pom.xml
+++ b/hapi-tinder-plugin/pom.xml
@@ -5,7 +5,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml
index b5339e92174..6e82bc66e3c 100644
--- a/hapi-tinder-test/pom.xml
+++ b/hapi-tinder-test/pom.xml
@@ -4,7 +4,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
diff --git a/pom.xml b/pom.xml
index 8f0f5992686..f9983b16ace 100644
--- a/pom.xml
+++ b/pom.xml
@@ -7,7 +7,7 @@
 	<groupId>ca.uhn.hapi.fhir</groupId>
 	<artifactId>hapi-fhir</artifactId>
 	<packaging>pom</packaging>
-	<version>6.5.2-SNAPSHOT</version>
+	<version>6.5.3-SNAPSHOT</version>
 	<name>HAPI-FHIR</name>
 	<description>An open-source implementation of the FHIR specification in Java.</description>
 	<url>https://hapifhir.io</url>
@@ -1329,9 +1329,9 @@
 				<version>${swagger_version}</version>
 			</dependency>
 			<dependency>
-				<groupId>mysql</groupId>
-				<artifactId>mysql-connector-java</artifactId>
-				<version>8.0.31</version>
+				<groupId>com.mysql</groupId>
+				<artifactId>mysql-connector-j</artifactId>
+				<version>8.0.32</version>
 			</dependency>
 			<dependency>
 				<groupId>org.springdoc</groupId>
diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
index 8e5b3359bbd..efea098ce3d 100644
--- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
+++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml
@@ -6,7 +6,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../../pom.xml</relativePath>
diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml
index 2ffa6a33301..e0247b85bf1 100644
--- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml
+++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml
@@ -4,7 +4,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../../pom.xml</relativePath>
diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml
index 4a56e107df3..30385867bff 100644
--- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml
+++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml
@@ -5,7 +5,7 @@
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../../pom.xml</relativePath>