From f5dfc3a5ed05329b7d7c0cd89227204705cca277 Mon Sep 17 00:00:00 2001 From: Ken Stevens Date: Thu, 29 Jul 2021 19:18:49 -0400 Subject: [PATCH] New reindex spring batch operation (#2845) * first sweep to add new $reindex job * first cut done. next up integration test. * start roughing out reindex everything * Failing test * test passes * add BundleUtil helper method * rough in processEverything * rough in processEverything * fix mock test * merge master * Make BaseJpaTest.mySrd lenient. * fix test * reindex everything works with FIXMEs * reindex everything more tests * moar tests * moar tests * fix test * fix test * fix regression and fix test * fix test cycle * fixme * fix test * test provider layer * reindex everything partition aware * yay last fixme! * run reindex in partitioned threads * add fixmes for areas that require extra testing * added transaction and log message * manual integration testing * changelog * reindex all partitions * bump hapi version * fix test * moar test * pre-review cleanup * fix javadoc * review feedback * review feedback * review feedback * Update hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_5_0/2845-reindex-operation.yaml Co-authored-by: michaelabuckley * review feedback * review feedback * Update hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java Co-authored-by: michaelabuckley * Update hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java Co-authored-by: michaelabuckley * Update hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantBatchOperationR4Test.java Co-authored-by: michaelabuckley * Update hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantBatchOperationR4Test.java Co-authored-by: michaelabuckley * Update hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/ResourceSearch.java Co-authored-by: michaelabuckley * Update 
hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IDeleteExpungeJobSubmitter.java Co-authored-by: michaelabuckley Co-authored-by: michaelabuckley --- hapi-deployable-pom/pom.xml | 2 +- hapi-fhir-android/pom.xml | 2 +- hapi-fhir-base/pom.xml | 2 +- .../model/ReadPartitionIdRequestDetails.java | 10 +- .../interceptor/model/RequestPartitionId.java | 5 + .../java/ca/uhn/fhir/util/BundleUtil.java | 9 + .../main/java/ca/uhn/fhir/util/JsonUtil.java | 10 + hapi-fhir-bom/pom.xml | 14 +- hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-app/pom.xml | 2 +- hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml | 2 +- hapi-fhir-cli/pom.xml | 2 +- hapi-fhir-client-okhttp/pom.xml | 2 +- hapi-fhir-client/pom.xml | 2 +- hapi-fhir-converter/pom.xml | 2 +- hapi-fhir-dist/pom.xml | 2 +- hapi-fhir-docs/pom.xml | 2 +- .../5_5_0/2845-reindex-operation.yaml | 5 + hapi-fhir-jacoco/pom.xml | 2 +- hapi-fhir-jaxrsserver-base/pom.xml | 2 +- hapi-fhir-jaxrsserver-example/pom.xml | 2 +- hapi-fhir-jpaserver-api/pom.xml | 2 +- .../ca/uhn/fhir/jpa/api/config/DaoConfig.java | 42 +- hapi-fhir-jpaserver-base/pom.xml | 3 +- .../uhn/fhir/jpa/batch/BatchJobsConfig.java | 17 +- .../job/MultiUrlJobParameterValidator.java} | 16 +- .../batch/job/MultiUrlProcessorJobConfig.java | 57 + .../batch/job/PartitionedUrlValidator.java | 72 + .../job}/model/PartitionedUrl.java | 2 +- .../job}/model/RequestListJson.java | 16 +- .../listener/PidReaderCounterListener.java | 6 +- .../reader/BatchDateThresholdUpdater.java | 96 + .../batch/reader/BatchResourceSearcher.java | 65 + ...CronologicalBatchAllResourcePidReader.java | 163 ++ ...rseCronologicalBatchResourcePidReader.java | 96 +- .../jpa/batch/writer/SqlExecutorWriter.java | 2 + .../bulk/export/job/ResourceToFileWriter.java | 4 +- .../ca/uhn/fhir/jpa/config/BaseConfig.java | 42 +- .../fhir/jpa/dao/BaseHapiFhirResourceDao.java | 4 +- .../BaseHapiFhirResourceDaoObservation.java | 7 +- .../jpa/dao/FhirResourceDaoPatientDstu2.java | 2 +- 
.../jpa/dao/data/IResourceReindexJobDao.java | 2 +- .../fhir/jpa/dao/data/IResourceTableDao.java | 4 + .../FhirResourceDaoObservationDstu3.java | 2 +- .../jpa/dao/expunge/DeleteExpungeService.java | 9 +- .../jpa/dao/expunge/ExpungeOperation.java | 18 +- .../fhir/jpa/dao/expunge/PartitionRunner.java | 42 +- .../jpa/dao/r4/FhirResourceDaoPatientR4.java | 2 +- .../jpa/dao/r5/FhirResourceDaoPatientR5.java | 2 +- .../delete/DeleteExpungeJobSubmitterImpl.java | 32 +- .../delete/job/DeleteExpungeJobConfig.java | 68 +- .../delete/job/DeleteExpungeProcessor.java | 8 +- .../jpa/entity/ResourceReindexJobEntity.java | 11 +- .../jpa/packages/PackageInstallerSvcImpl.java | 10 +- .../partition/IRequestPartitionHelperSvc.java | 1 - .../jpa/provider/BaseJpaSystemProvider.java | 10 + .../BaseJpaSystemProviderDstu2Plus.java | 12 +- .../jpa/reindex/ReindexJobSubmitterImpl.java | 107 + .../job/ReindexEverythingJobConfig.java | 98 + .../jpa/reindex/job/ReindexJobConfig.java | 89 + .../fhir/jpa/reindex/job/ReindexWriter.java | 67 + .../jpa/search/builder/SearchBuilder.java | 8 +- .../jpa/search/reindex/ResourceReindexer.java | 102 + .../reindex/ResourceReindexingSvcImpl.java | 158 +- .../batch/job/MultiUrlJobParameterUtil.java | 29 + .../reader/BatchDateThresholdUpdaterTest.java | 125 ++ ...ronologicalBatchResourcePidReaderTest.java | 48 +- .../java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java | 7 +- .../jpa/dao/expunge/PartitionRunnerTest.java | 61 +- .../FhirResourceDaoR4SearchOptimizedTest.java | 2 +- .../job/DeleteExpungeJobParameterUtil.java | 23 - .../jpa/delete/job/DeleteExpungeJobTest.java | 3 +- ...=> MultiUrlJobParameterValidatorTest.java} | 24 +- .../fhir/jpa/delete/job/ReindexJobTest.java | 121 + .../jpa/delete/job/ReindexTestHelper.java | 105 + .../PatientIdPartitionInterceptorTest.java | 8 +- .../r4/BaseResourceProviderR4Test.java | 5 +- .../r4/MultitenantBatchOperationR4Test.java | 230 ++ .../r4/MultitenantDeleteExpungeR4Test.java | 134 -- .../ResourceReindexingSvcImplTest.java | 
45 +- .../uhn/fhir/jpa/util/MultimapCollector.java | 71 +- .../src/test/resources/r4/load_bundle.json | 1968 ++++++++--------- hapi-fhir-jpaserver-batch/pom.xml | 2 +- hapi-fhir-jpaserver-cql/pom.xml | 2 +- hapi-fhir-jpaserver-mdm/pom.xml | 2 +- hapi-fhir-jpaserver-migrate/pom.xml | 2 +- hapi-fhir-jpaserver-model/pom.xml | 2 +- hapi-fhir-jpaserver-searchparam/pom.xml | 2 +- .../fhir/jpa/searchparam/MatchUrlService.java | 9 +- .../fhir/jpa/searchparam/ResourceSearch.java | 16 +- hapi-fhir-jpaserver-subscription/pom.xml | 2 +- hapi-fhir-jpaserver-test-utilities/pom.xml | 2 +- hapi-fhir-jpaserver-uhnfhirtest/pom.xml | 2 +- hapi-fhir-server-mdm/pom.xml | 2 +- hapi-fhir-server-openapi/pom.xml | 2 +- hapi-fhir-server/pom.xml | 2 +- .../fhir/rest/api/server/IBundleProvider.java | 9 +- .../storage/IDeleteExpungeJobSubmitter.java | 17 +- .../server/storage/IMultiUrlJobSubmitter.java | 37 + .../server/storage/IReindexJobSubmitter.java | 30 + .../provider/BaseMultiUrlProcessor.java | 65 + .../provider/DeleteExpungeProvider.java | 28 +- .../server/provider/ProviderConstants.java | 25 + .../rest/server/provider/ReindexProvider.java | 79 + .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../hapi-fhir-spring-boot-samples/pom.xml | 2 +- .../hapi-fhir-spring-boot-starter/pom.xml | 2 +- hapi-fhir-spring-boot/pom.xml | 2 +- hapi-fhir-structures-dstu2.1/pom.xml | 2 +- hapi-fhir-structures-dstu2/pom.xml | 2 +- hapi-fhir-structures-dstu3/pom.xml | 2 +- hapi-fhir-structures-hl7org-dstu2/pom.xml | 2 +- hapi-fhir-structures-r4/pom.xml | 2 +- .../server/provider/BatchProviderTest.java | 201 ++ .../provider/DeleteExpungeProviderTest.java | 87 - hapi-fhir-structures-r5/pom.xml | 2 +- hapi-fhir-test-utilities/pom.xml | 2 +- hapi-fhir-testpage-overlay/pom.xml | 2 +- .../pom.xml | 2 +- hapi-fhir-validation-resources-dstu2/pom.xml | 2 +- hapi-fhir-validation-resources-dstu3/pom.xml | 2 +- hapi-fhir-validation-resources-r4/pom.xml | 2 +- 
hapi-fhir-validation-resources-r5/pom.xml | 2 +- hapi-fhir-validation/pom.xml | 2 +- hapi-tinder-plugin/pom.xml | 16 +- hapi-tinder-test/pom.xml | 2 +- pom.xml | 2 +- restful-server-example/pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- .../pom.xml | 2 +- 134 files changed, 3580 insertions(+), 1785 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_5_0/2845-reindex-operation.yaml rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/{delete/job/DeleteExpungeJobParameterValidator.java => batch/job/MultiUrlJobParameterValidator.java} (79%) create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/MultiUrlProcessorJobConfig.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/PartitionedUrlValidator.java rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/{delete => batch/job}/model/PartitionedUrl.java (97%) rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/{delete => batch/job}/model/RequestListJson.java (88%) create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/reader/BatchDateThresholdUpdater.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/reader/BatchResourceSearcher.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/reader/CronologicalBatchAllResourcePidReader.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/ReindexJobSubmitterImpl.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexEverythingJobConfig.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexJobConfig.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexWriter.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java create mode 100644 
hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterUtil.java create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/reader/BatchDateThresholdUpdaterTest.java delete mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterUtil.java rename hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/{DeleteExpungeJobParameterValidatorTest.java => MultiUrlJobParameterValidatorTest.java} (66%) create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexTestHelper.java create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantBatchOperationR4Test.java delete mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantDeleteExpungeR4Test.java create mode 100644 hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IMultiUrlJobSubmitter.java create mode 100644 hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IReindexJobSubmitter.java create mode 100644 hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/BaseMultiUrlProcessor.java create mode 100644 hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ReindexProvider.java create mode 100644 hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/provider/BatchProviderTest.java delete mode 100644 hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/provider/DeleteExpungeProviderTest.java diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index b9e16473e1a..e89630c261a 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index 
e99361c3363..bc0cfa6f187 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index 37a04d3b776..b440419860b 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java index 5c629ee4288..aca47dac767 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java @@ -42,8 +42,9 @@ public class ReadPartitionIdRequestDetails { myConditionalTargetOrNull = theConditionalTargetOrNull; } - public IBaseResource getConditionalTargetOrNull() { - return myConditionalTargetOrNull; + public static ReadPartitionIdRequestDetails forRead(String theResourceType, IIdType theId, boolean theIsVread) { + RestOperationTypeEnum op = theIsVread ? RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ; + return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null); } public String getResourceType() { @@ -62,9 +63,8 @@ public class ReadPartitionIdRequestDetails { return mySearchParams; } - public static ReadPartitionIdRequestDetails forRead(String theResourceType, IIdType theId, boolean theIsVread) { - RestOperationTypeEnum op = theIsVread ? 
RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ; - return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null); + public IBaseResource getConditionalTargetOrNull() { + return myConditionalTargetOrNull; } public static ReadPartitionIdRequestDetails forSearchType(String theResourceType, Object theParams, IBaseResource theConditionalOperationTargetOrNull) { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java index d161be864f1..35b89939a7d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.interceptor.model; */ import ca.uhn.fhir.model.api.IModelJson; +import ca.uhn.fhir.util.JsonUtil; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -155,6 +156,10 @@ public class RequestPartitionId implements IModelJson { .toHashCode(); } + public String toJson() { + return JsonUtil.serializeOrInvalidRequest(this); + } + @Nullable public Integer getFirstPartitionIdOrNull() { if (myPartitionIds != null) { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java index f53af2cd4ca..e9c47f48490 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java @@ -449,6 +449,15 @@ public class BundleUtil { return toListOfResourcesOfType(theContext, theBundle, IBaseResource.class); } + /** + * Extract all of ids of all the resources from a given bundle + */ + public static List toListOfResourceIds(FhirContext theContext, IBaseBundle theBundle) { + return 
toListOfResourcesOfType(theContext, theBundle, IBaseResource.class).stream() + .map(resource -> resource.getIdElement().getIdPart()) + .collect(Collectors.toList()); + } + /** * Extract all of the resources of a given type from a given bundle */ diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java index 72545a3ea2c..9c967289761 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java @@ -20,7 +20,10 @@ package ca.uhn.fhir.util; * #L% */ +import ca.uhn.fhir.model.api.IModelJson; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; @@ -82,4 +85,11 @@ public class JsonUtil { theWriter.append(serialize(theInput)); } + public static String serializeOrInvalidRequest(IModelJson theJson) { + try { + return ourMapperNonPrettyPrint.writeValueAsString(theJson); + } catch (JsonProcessingException e) { + throw new InvalidRequestException("Failed to encode " + theJson.getClass(), e); + } + } } diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index 5e2c982af71..e330d694ce7 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -3,16 +3,16 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT pom HAPI FHIR BOM - - ca.uhn.hapi.fhir - hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT - ../hapi-deployable-pom/pom.xml - + + ca.uhn.hapi.fhir + hapi-deployable-pom + 5.5.0-PRE8-SNAPSHOT + ../hapi-deployable-pom/pom.xml + diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index ba080272c45..ef49d86d7f3 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 
+4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index 1a2e3701f42..faacba58cd8 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml index 9fe3bec0a31..d1bce28c525 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../../hapi-deployable-pom diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index 454a3a95bef..935047c7060 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index d3e4e0bccac..3a5579f6b0b 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 075afdc3046..8598814e473 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index 831a0c69b78..d33e25672ee 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff 
--git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index b55f14174d9..6c83bad936f 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index fe6c2fc7d7d..7a38121bfcb 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_5_0/2845-reindex-operation.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_5_0/2845-reindex-operation.yaml new file mode 100644 index 00000000000..4dab1eb28fd --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_5_0/2845-reindex-operation.yaml @@ -0,0 +1,5 @@ +--- +type: add +issue: 2845 +title: "Added new `$reindex` operation with similar syntax to `$delete-expunge` that creates a spring-batch job to reindex selected resources. +`$mark-all-resources-for-reindexing` and `$perform-reindexing-pass` are now deprecated, and will likely be removed in a future release." 
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index 2f66266f48e..5806383069c 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index f5823a8c865..bce2629e751 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-example/pom.xml b/hapi-fhir-jaxrsserver-example/pom.xml index 627bf1c344b..4d51572f3f1 100644 --- a/hapi-fhir-jaxrsserver-example/pom.xml +++ b/hapi-fhir-jaxrsserver-example/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-jpaserver-api/pom.xml b/hapi-fhir-jpaserver-api/pom.xml index c30ed47c449..60b9ff7e0a6 100644 --- a/hapi-fhir-jpaserver-api/pom.xml +++ b/hapi-fhir-jpaserver-api/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java index 83e07a25b27..767d2f00e5c 100644 --- a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java +++ b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java @@ -97,7 +97,8 @@ public class DaoConfig { private static final Integer DEFAULT_MAXIMUM_SEARCH_RESULT_COUNT_IN_TRANSACTION = null; private static final Integer DEFAULT_MAXIMUM_TRANSACTION_BUNDLE_SIZE = null; private static final Logger ourLog = LoggerFactory.getLogger(DaoConfig.class); - private static final int DEFAULT_EXPUNGE_BATCH_SIZE = 800; + 
public static final int DEFAULT_EXPUNGE_BATCH_SIZE = 800; + private static final int DEFAULT_REINDEX_BATCH_SIZE = 800; private static final int DEFAULT_MAXIMUM_DELETE_CONFLICT_COUNT = 60; /** * Child Configurations @@ -163,6 +164,7 @@ public class DaoConfig { private boolean myExpungeEnabled; private boolean myDeleteExpungeEnabled; private int myExpungeBatchSize = DEFAULT_EXPUNGE_BATCH_SIZE; + private int myReindexBatchSize = DEFAULT_REINDEX_BATCH_SIZE; private int myReindexThreadCount; private int myExpungeThreadCount; private Set myBundleTypesAllowedForStorage; @@ -217,6 +219,12 @@ public class DaoConfig { * @since 5.2.0 */ private boolean myUseLegacySearchBuilder = false; + + /** + * @since 5.5.0 + */ + private boolean myReindexEnabled = true; + /** * update setter javadoc if default changes */ @@ -1646,6 +1654,38 @@ public class DaoConfig { myExpungeBatchSize = theExpungeBatchSize; } + /** + * The reindex batch size (default 800) determines the number of records reindexed in a single transaction. + */ + public int getReindexBatchSize() { + return myReindexBatchSize; + } + + /** + * The reindex batch size (default 800) determines the number of records reindexed in a single transaction. + */ + public void setReindexBatchSize(int theReindexBatchSize) { + myReindexBatchSize = theReindexBatchSize; + } + + + /** + * If set to false (default is true), reindexing of resources will be disabled on this + * server. + */ + public boolean isReindexEnabled() { + return myReindexEnabled; + } + + /** + * If set to false (default is true), reindexing of resources will be disabled on this + * server. + */ + + public void setReindexEnabled(boolean theReindexEnabled) { + myReindexEnabled = theReindexEnabled; + } + /** * Should resources be marked as needing reindexing when a * SearchParameter resource is added or changed. 
This should generally diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 833b54077b4..f4c5cfc8ed4 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml @@ -150,7 +150,6 @@ hapi-fhir-jpaserver-batch ${project.version} - net.ttddyy datasource-proxy diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java index ba2318149a4..4314a68c8b5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java @@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.batch; import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig; import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig; +import ca.uhn.fhir.jpa.reindex.job.ReindexEverythingJobConfig; +import ca.uhn.fhir.jpa.reindex.job.ReindexJobConfig; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @@ -36,7 +38,9 @@ import java.util.Set; CommonBatchJobConfig.class, BulkExportJobConfig.class, BulkImportJobConfig.class, - DeleteExpungeJobConfig.class + DeleteExpungeJobConfig.class, + ReindexJobConfig.class, + ReindexEverythingJobConfig.class }) public class BatchJobsConfig { @@ -79,4 +83,15 @@ public class BatchJobsConfig { * Delete Expunge */ public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob"; + + /** + * Reindex + */ + public static final String REINDEX_JOB_NAME = "reindexJob"; + + /** + * Reindex Everything + */ + public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob"; + } diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterValidator.java similarity index 79% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterValidator.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterValidator.java index e94c44f9ca8..57b77ea52ac 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterValidator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterValidator.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.delete.job; +package ca.uhn.fhir.jpa.batch.job; /*- * #%L @@ -21,11 +21,10 @@ package ca.uhn.fhir.jpa.delete.job; */ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.delete.model.PartitionedUrl; -import ca.uhn.fhir.jpa.delete.model.RequestListJson; +import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl; +import ca.uhn.fhir.jpa.batch.job.model.RequestListJson; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.ResourceSearch; -import ca.uhn.fhir.rest.server.provider.ProviderConstants; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersInvalidException; import org.springframework.batch.core.JobParametersValidator; @@ -35,11 +34,12 @@ import static ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidRe /** * This class will prevent a job from running any of the provided URLs are not valid on this server. 
*/ -public class DeleteExpungeJobParameterValidator implements JobParametersValidator { +public class MultiUrlJobParameterValidator implements JobParametersValidator { + public static String JOB_PARAM_OPERATION_NAME = "operation-name"; private final MatchUrlService myMatchUrlService; private final DaoRegistry myDaoRegistry; - public DeleteExpungeJobParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) { + public MultiUrlJobParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) { myMatchUrlService = theMatchUrlService; myDaoRegistry = theDaoRegistry; } @@ -54,13 +54,13 @@ public class DeleteExpungeJobParameterValidator implements JobParametersValidato for (PartitionedUrl partitionedUrl : requestListJson.getPartitionedUrls()) { String url = partitionedUrl.getUrl(); try { - ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url); + ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url, partitionedUrl.getRequestPartitionId()); String resourceName = resourceSearch.getResourceName(); if (!myDaoRegistry.isResourceTypeSupported(resourceName)) { throw new JobParametersInvalidException("The resource type " + resourceName + " is not supported on this server."); } } catch (UnsupportedOperationException e) { - throw new JobParametersInvalidException("Failed to parse " + ProviderConstants.OPERATION_DELETE_EXPUNGE + " " + JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage()); + throw new JobParametersInvalidException("Failed to parse " + theJobParameters.getString(JOB_PARAM_OPERATION_NAME) + " " + JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage()); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/MultiUrlProcessorJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/MultiUrlProcessorJobConfig.java new file mode 100644 index 00000000000..c47f01b59d1 --- /dev/null +++ 
package ca.uhn.fhir.jpa.batch.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.context.annotation.Bean;

/**
 * Common Spring configuration shared by batch jobs that operate on a list of
 * partitioned search URLs (e.g. delete-expunge, reindex): declares the parameter
 * validator, pid reader, pid-count listener and SQL writer beans.
 */
public class MultiUrlProcessorJobConfig {
	// Jobs process resources updated up to this many minutes in the future so that
	// resources changed while the job is being submitted are not missed.
	public static final int MINUTES_IN_FUTURE_TO_PROCESS_FROM = 1;

	/**
	 * Validates that every URL in the job's request list names a resource type
	 * supported by this server.
	 */
	@Bean
	public JobParametersValidator multiUrlProcessorParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
		return new MultiUrlJobParameterValidator(theMatchUrlService, theDaoRegistry);
	}

	/**
	 * Writer that executes lists of SQL statements produced upstream.
	 * Step-scoped: a fresh instance per step execution.
	 */
	@Bean
	@StepScope
	public SqlExecutorWriter sqlExecutorWriter() {
		return new SqlExecutorWriter();
	}

	/**
	 * Listener that records the running total of pids read into the step
	 * execution context.
	 */
	@Bean
	@StepScope
	public PidReaderCounterListener pidCountRecorderListener() {
		return new PidReaderCounterListener();
	}

	/**
	 * Reader that returns resource pids for each URL in reverse chronological
	 * (most recently updated first) order.
	 */
	@Bean
	@StepScope
	public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
		return new ReverseCronologicalBatchResourcePidReader();
	}
}
+ * #L% + */ + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl; +import ca.uhn.fhir.jpa.batch.job.model.RequestListJson; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; +import ca.uhn.fhir.jpa.searchparam.MatchUrlService; +import ca.uhn.fhir.jpa.searchparam.ResourceSearch; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +public class PartitionedUrlValidator { + @Autowired + MatchUrlService myMatchUrlService; + @Autowired + IRequestPartitionHelperSvc myRequestPartitionHelperSvc; + @Autowired + FhirContext myFhirContext; + + /** + * This method will throw an exception if the user is not allowed to access the requested resource type on the partition determined by the request + */ + + public RequestListJson buildRequestListJson(RequestDetails theRequest, List theUrlsToProcess) { + List partitionedUrls = new ArrayList<>(); + for (String url : theUrlsToProcess) { + ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url); + RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap(), null); + partitionedUrls.add(new PartitionedUrl(url, requestPartitionId)); + } + RequestListJson retval = new RequestListJson(); + retval.setPartitionedUrls(partitionedUrls); + return retval; + } + + public RequestPartitionId requestPartitionIdFromRequest(RequestDetails theRequest) { + Set allResourceNames = myFhirContext.getResourceTypes(); + SearchParameterMap map = SearchParameterMap.newSynchronous(); + // Verify that the user has access to every resource type on the server: + for (String resourceName : 
allResourceNames) { + myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, resourceName, map, null); + } + // Then return the partition for the Patient resource type. Note Patient was an arbitrary choice here. + return myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, "Patient", map, null); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/PartitionedUrl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/model/PartitionedUrl.java similarity index 97% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/PartitionedUrl.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/model/PartitionedUrl.java index f1f0d4b7008..ebc69de9d18 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/PartitionedUrl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/model/PartitionedUrl.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.delete.model; +package ca.uhn.fhir.jpa.batch.job.model; /*- * #%L diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/RequestListJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/model/RequestListJson.java similarity index 88% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/RequestListJson.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/model/RequestListJson.java index 4824091aa20..30490ef870a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/RequestListJson.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/model/RequestListJson.java @@ -1,4 +1,4 @@ -package ca.uhn.fhir.jpa.delete.model; +package ca.uhn.fhir.jpa.batch.job.model; /*- * #%L @@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.delete.model; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import 
ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.util.JsonUtil; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -60,13 +60,15 @@ public class RequestListJson implements IModelJson { } } + public String toJson() { + return JsonUtil.serializeOrInvalidRequest(this); + } + @Override public String toString() { - try { - return ourObjectMapper.writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new InvalidRequestException("Failed to encode " + RequestListJson.class, e); - } + return "RequestListJson{" + + "myPartitionedUrls=" + myPartitionedUrls + + '}'; } public List getPartitionedUrls() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/listener/PidReaderCounterListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/listener/PidReaderCounterListener.java index 6a3bf1f60a1..54a0edf2240 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/listener/PidReaderCounterListener.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/listener/PidReaderCounterListener.java @@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.batch.listener; */ import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.annotation.AfterProcess; +import org.springframework.batch.core.annotation.AfterRead; import org.springframework.batch.core.annotation.BeforeStep; import java.util.List; @@ -40,8 +40,8 @@ public class PidReaderCounterListener { myStepExecution = stepExecution; } - @AfterProcess - public void afterProcess(List thePids, List theSqlList) { + @AfterRead + public void afterRead(List thePids) { myTotalPidsProcessed += thePids.size(); myStepExecution.getExecutionContext().putLong(RESOURCE_TOTAL_PROCESSED, 
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;

/**
 * Used by batch jobs that process resource pids by date in multiple passes, to guarantee the
 * same resource is not processed twice.  After a pass, call
 * {@link #updateThresholdAndCache(Date, Set, List)} with the pids that were processed: it
 * returns the date of the last pid in the list (the threshold for the next pass) and loads
 * every pid sharing that date into the supplied cache so the caller can exclude them from
 * the next pass.
 */
public class BatchDateThresholdUpdater {
	// Extracts the relevant date (e.g. last-updated) for a resource pid
	private Function<Long, Date> myDateFromPid;

	public BatchDateThresholdUpdater() {
	}

	public BatchDateThresholdUpdater(Function<Long, Date> theDateFromPid) {
		myDateFromPid = theDateFromPid;
	}

	/**
	 * Sets the threshold date for the next pass from the last resource on the list and collects
	 * all of the pids that share that date into the provided cache so the caller can exclude
	 * them from the next pass.
	 *
	 * @param thePrevThreshold                          the date threshold from the previous pass (may be null on the first pass)
	 * @param theAlreadyProcessedPidsWithThresholdDate  out-param: cleared when the threshold date changes by value, then loaded with every pid whose date equals the new threshold
	 * @param theProcessedPidsOrderedByDate             the pids just processed, ordered by date (ascending or descending)
	 * @return the new date threshold (same value as the old threshold if all pids on the list share that date)
	 */
	public Date updateThresholdAndCache(Date thePrevThreshold, Set<Long> theAlreadyProcessedPidsWithThresholdDate, List<Long> theProcessedPidsOrderedByDate) {
		if (theProcessedPidsOrderedByDate.isEmpty()) {
			return thePrevThreshold;
		}

		// Adjust the threshold to be the date of the last resource in the batch we found
		Long pidOfLatestResourceInBatch = theProcessedPidsOrderedByDate.get(theProcessedPidsOrderedByDate.size() - 1);
		Date latestUpdatedDate = myDateFromPid.apply(pidOfLatestResourceInBatch);

		// FIX: compare by value, not by reference.  myDateFromPid returns a fresh Date instance,
		// so a reference comparison (!=) would clear the cache on virtually every pass even when
		// the threshold date is unchanged, allowing pids at the threshold date to be reprocessed.
		if (!Objects.equals(thePrevThreshold, latestUpdatedDate)) {
			theAlreadyProcessedPidsWithThresholdDate.clear();
		}
		theAlreadyProcessedPidsWithThresholdDate.add(pidOfLatestResourceInBatch);

		Date newThreshold = latestUpdatedDate;
		if (theProcessedPidsOrderedByDate.size() <= 1) {
			return newThreshold;
		}

		// There is more than one resource in this batch: walk backwards from the end, adding any
		// others that share the threshold date.  Assumes the list is ordered by date.
		for (int index = theProcessedPidsOrderedByDate.size() - 2; index >= 0; --index) {
			Long pid = theProcessedPidsOrderedByDate.get(index);
			Date newDate = myDateFromPid.apply(pid);
			if (!latestUpdatedDate.equals(newDate)) {
				break;
			}
			theAlreadyProcessedPidsWithThresholdDate.add(pid);
		}

		return newThreshold;
	}

	/**
	 * @param theDateFromPid a Function to extract a date from a resource pid
	 * @return this, for fluent chaining
	 */
	public BatchDateThresholdUpdater setDateFromPid(Function<Long, Date> theDateFromPid) {
		myDateFromPid = theDateFromPid;
		return this;
	}
}
+ * #L% + */ + +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.dao.IResultIterator; +import ca.uhn.fhir.jpa.dao.ISearchBuilder; +import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; +import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; +import ca.uhn.fhir.jpa.partition.SystemRequestDetails; +import ca.uhn.fhir.jpa.searchparam.ResourceSearch; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.annotation.Nonnull; +import java.util.UUID; + +/** + * This service is used by batch processes to search resources + */ +public class BatchResourceSearcher { + @Autowired + private SearchBuilderFactory mySearchBuilderFactory; + @Autowired + private DaoRegistry myDaoRegistry; + + public IResultIterator performSearch(ResourceSearch theResourceSearch, Integer theBatchSize) { + String resourceName = theResourceSearch.getResourceName(); + RequestPartitionId requestPartitionId = theResourceSearch.getRequestPartitionId(); + + IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceName); + final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, resourceName, theResourceSearch.getResourceType()); + sb.setFetchSize(theBatchSize); + SystemRequestDetails requestDetails = buildSystemRequestDetails(requestPartitionId); + SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(requestDetails, UUID.randomUUID().toString()); + IResultIterator resultIter = sb.createQuery(theResourceSearch.getSearchParameterMap(), searchRuntimeDetails, requestDetails, requestPartitionId); + return resultIter; + } + + @Nonnull + private SystemRequestDetails buildSystemRequestDetails(RequestPartitionId theRequestPartitionId) { + SystemRequestDetails retval = new SystemRequestDetails(); + retval.setRequestPartitionId(theRequestPartitionId); + return retval; + } +} diff --git 
package ca.uhn.fhir.jpa.batch.reader;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * This Spring Batch reader takes 3 parameters:
 * {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search.
 * {@link #JOB_PARAM_START_TIME}: The latest timestamp of resources to search for.
 * {@link #JOB_PARAM_REQUEST_PARTITION}: (optional) The partition of resources to read.
 * <p>
 * The reader returns at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
 * once no more matching resources are available.  Pids are returned in chronological order
 * (oldest updated date first).  The current low threshold is saved in the execution context so
 * that a restarted job picks up where it left off.
 */
public class CronologicalBatchAllResourcePidReader implements ItemReader<List<Long>>, ItemStream {
	public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
	public static final String JOB_PARAM_START_TIME = "start-time";
	public static final String JOB_PARAM_REQUEST_PARTITION = "request-partition";
	public static final String CURRENT_THRESHOLD_LOW = "current.threshold-low";

	private static final Logger ourLog = LoggerFactory.getLogger(CronologicalBatchAllResourcePidReader.class);
	private static final Date BEGINNING_OF_TIME = new Date(0);

	@Autowired
	private IResourceTableDao myResourceTableDao;
	@Autowired
	private DaoConfig myDaoConfig;

	private Integer myBatchSize;
	private Date myThresholdLow = BEGINNING_OF_TIME;
	private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater(this::dateFromPid);
	// Pids already returned whose updated date equals myThresholdLow; excluded from the next query
	private final Set<Long> myAlreadyProcessedPidsWithLowDate = new HashSet<>();
	private Date myStartTime;
	private RequestPartitionId myRequestPartitionId;

	@Autowired
	public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
		myBatchSize = theBatchSize;
	}

	@Autowired
	public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
		myStartTime = theStartTime;
	}

	@Autowired
	public void setRequestPartitionId(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_PARTITION + "']}") String theRequestPartitionIdJson) throws JsonProcessingException {
		if (theRequestPartitionIdJson == null) {
			return;
		}
		myRequestPartitionId = RequestPartitionId.fromJson(theRequestPartitionIdJson);
	}

	/**
	 * Builds the job parameter set consumed by this reader's {@code @Value} setters.
	 *
	 * @param theBatchSize          batch size, or null to fall back to {@link DaoConfig#getExpungeBatchSize()}
	 * @param theRequestPartitionId the partition to read; serialized to JSON
	 */
	public static JobParameters buildJobParameters(Integer theBatchSize, RequestPartitionId theRequestPartitionId) {
		Map<String, JobParameter> map = new HashMap<>();
		map.put(JOB_PARAM_REQUEST_PARTITION, new JobParameter(theRequestPartitionId.toJson()));
		// Process resources updated up to slightly in the future so in-flight changes are included
		map.put(JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
		if (theBatchSize != null) {
			map.put(JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
		}
		return new JobParameters(map);
	}

	@Override
	public List<Long> read() throws Exception {
		List<Long> nextBatch = getNextBatch();
		// Returning null tells Spring Batch this reader is exhausted
		return nextBatch.isEmpty() ? null : nextBatch;
	}

	/** Looks up the updated date for a resource pid; used as the threshold-date extractor. */
	private Date dateFromPid(Long thePid) {
		ResourceTable entity = myResourceTableDao.findById(thePid).orElseThrow(IllegalStateException::new);
		return entity.getUpdatedDate();
	}

	@Override
	public void open(ExecutionContext executionContext) throws ItemStreamException {
		if (myBatchSize == null) {
			myBatchSize = myDaoConfig.getExpungeBatchSize();
		}
		// Restore the low threshold when the job is restarted
		if (executionContext.containsKey(CURRENT_THRESHOLD_LOW)) {
			myThresholdLow = new Date(executionContext.getLong(CURRENT_THRESHOLD_LOW));
		}
	}

	@Override
	public void update(ExecutionContext executionContext) throws ItemStreamException {
		executionContext.putLong(CURRENT_THRESHOLD_LOW, myThresholdLow.getTime());
	}

	@Override
	public void close() throws ItemStreamException {
	}

	private List<Long> getNextBatch() {
		PageRequest page = PageRequest.of(0, myBatchSize);
		List<Long> retval = new ArrayList<>();
		Slice<Long> slice;
		do {
			if (myRequestPartitionId == null || myRequestPartitionId.isAllPartitions()) {
				slice = myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(page, myThresholdLow, myStartTime);
			} else {
				slice = myResourceTableDao.findIdsOfPartitionedResourcesWithinUpdatedRangeOrderedFromOldest(page, myThresholdLow, myStartTime, myRequestPartitionId.getFirstPartitionIdOrNull());
			}
			retval.addAll(slice.getContent());
			// Drop pids at the threshold date that a previous batch already returned
			retval.removeAll(myAlreadyProcessedPidsWithLowDate);
			page = page.next();
		} while (retval.size() < myBatchSize && slice.hasNext());

		if (ourLog.isDebugEnabled()) {
			ourLog.debug("Results: {}", retval);
		}
		myThresholdLow = myBatchDateThresholdUpdater.updateThresholdAndCache(myThresholdLow, myAlreadyProcessedPidsWithLowDate, retval);
		return retval;
	}
}
ca.uhn.fhir.jpa.searchparam.ResourceSearch; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -35,9 +38,12 @@ import ca.uhn.fhir.rest.api.SortOrderEnum; import ca.uhn.fhir.rest.api.SortSpec; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.param.DateRangeParam; +import org.apache.commons.lang3.time.DateUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobParameter; +import org.springframework.batch.core.JobParameters; import org.springframework.batch.item.ExecutionContext; import org.springframework.batch.item.ItemReader; import org.springframework.batch.item.ItemStream; @@ -46,10 +52,14 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import javax.annotation.Nonnull; +import java.util.ArrayList; import java.util.Date; import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; /** @@ -65,6 +75,7 @@ import java.util.stream.Collectors; * restarting jobs that use this reader so it can pick up where it left off. 
*/ public class ReverseCronologicalBatchResourcePidReader implements ItemReader>, ItemStream { + private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class); public static final String JOB_PARAM_REQUEST_LIST = "url-list"; public static final String JOB_PARAM_BATCH_SIZE = "batch-size"; @@ -72,7 +83,6 @@ public class ReverseCronologicalBatchResourcePidReader implements ItemReader myPartitionedUrls; private Integer myBatchSize; private final Map myThresholdHighByUrlIndex = new HashMap<>(); + private final Map> myAlreadyProcessedPidsWithHighDate = new HashMap<>(); + private int myUrlIndex = 0; private Date myStartTime; @@ -108,8 +122,7 @@ public class ReverseCronologicalBatchResourcePidReader implements ItemReader read() throws Exception { while (myUrlIndex < myPartitionedUrls.size()) { - List nextBatch; - nextBatch = getNextBatch(); + List nextBatch = getNextBatch(); if (nextBatch.isEmpty()) { ++myUrlIndex; continue; @@ -121,51 +134,53 @@ public class ReverseCronologicalBatchResourcePidReader implements ItemReader getNextBatch() { - ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl()); - SearchParameterMap map = buildSearchParameterMap(resourceSearch); + RequestPartitionId requestPartitionId = myPartitionedUrls.get(myUrlIndex).getRequestPartitionId(); + ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl(), requestPartitionId); + addDateCountAndSortToSearch(resourceSearch); // Perform the search - IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceSearch.getResourceName()); - List retval = dao.searchForIds(map, buildSystemRequestDetails()).stream() - .map(ResourcePersistentId::getIdAsLong) - .collect(Collectors.toList()); + IResultIterator resultIter = myBatchResourceSearcher.performSearch(resourceSearch, myBatchSize); + Set newPids = new LinkedHashSet<>(); + Set alreadySeenPids = 
myAlreadyProcessedPidsWithHighDate.computeIfAbsent(myUrlIndex, i -> new HashSet<>()); + + do { + List pids = resultIter.getNextResultBatch(myBatchSize).stream().map(ResourcePersistentId::getIdAsLong).collect(Collectors.toList()); + newPids.addAll(pids); + newPids.removeAll(alreadySeenPids); + } while (newPids.size() < myBatchSize && resultIter.hasNext()); if (ourLog.isDebugEnabled()) { - ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), map.toNormalizedQueryString(myFhirContext), retval.size()); - ourLog.debug("Results: {}", retval); + ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap().toNormalizedQueryString(myFhirContext), newPids.size()); + ourLog.debug("Results: {}", newPids); } - if (!retval.isEmpty()) { - // Adjust the high threshold to be the earliest resource in the batch we found - Long pidOfOldestResourceInBatch = retval.get(retval.size() - 1); - IBaseResource earliestResource = dao.readByPid(new ResourcePersistentId(pidOfOldestResourceInBatch)); - myThresholdHighByUrlIndex.put(myUrlIndex, earliestResource.getMeta().getLastUpdated()); - } + setDateFromPidFunction(resourceSearch); + + List retval = new ArrayList<>(newPids); + Date newThreshold = myBatchDateThresholdUpdater.updateThresholdAndCache(myThresholdHighByUrlIndex.get(myUrlIndex), myAlreadyProcessedPidsWithHighDate.get(myUrlIndex), retval); + myThresholdHighByUrlIndex.put(myUrlIndex, newThreshold); return retval; } - @Nonnull - private SearchParameterMap buildSearchParameterMap(ResourceSearch resourceSearch) { + private void setDateFromPidFunction(ResourceSearch resourceSearch) { + final IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceSearch.getResourceName()); + + myBatchDateThresholdUpdater.setDateFromPid(pid -> { + IBaseResource oldestResource = dao.readByPid(new ResourcePersistentId(pid)); + return oldestResource.getMeta().getLastUpdated(); + }); + } + + private void 
addDateCountAndSortToSearch(ResourceSearch resourceSearch) { SearchParameterMap map = resourceSearch.getSearchParameterMap(); map.setLastUpdated(new DateRangeParam().setUpperBoundInclusive(myThresholdHighByUrlIndex.get(myUrlIndex))); map.setLoadSynchronousUpTo(myBatchSize); map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC)); - return map; - } - - @Nonnull - private SystemRequestDetails buildSystemRequestDetails() { - SystemRequestDetails retval = new SystemRequestDetails(); - retval.setRequestPartitionId(myPartitionedUrls.get(myUrlIndex).getRequestPartitionId()); - return retval; } @Override public void open(ExecutionContext executionContext) throws ItemStreamException { - if (myBatchSize == null) { - myBatchSize = myDaoConfig.getExpungeBatchSize(); - } if (executionContext.containsKey(CURRENT_URL_INDEX)) { myUrlIndex = new Long(executionContext.getLong(CURRENT_URL_INDEX)).intValue(); } @@ -197,4 +212,17 @@ public class ReverseCronologicalBatchResourcePidReader implements ItemReader map = new HashMap<>(); + map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName)); + map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson())); + map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM))); + if (theBatchSize != null) { + map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue())); + } + JobParameters parameters = new JobParameters(map); + return parameters; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/writer/SqlExecutorWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/writer/SqlExecutorWriter.java index 51a295ffb64..f4679968f35 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/writer/SqlExecutorWriter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/writer/SqlExecutorWriter.java @@ -54,6 +54,8 @@ public class SqlExecutorWriter implements ItemWriter> { @Override public void write(List> theSqlLists) throws Exception { + + // Note that since our chunk size is 1, there will always be exactly one list for (List sqlList : theSqlLists) { ourLog.info("Executing {} sql commands", sqlList.size()); for (String sql : sqlList) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java index ac807768926..d0655ca0619 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceToFileWriter.java @@ -58,8 +58,8 @@ public class ResourceToFileWriter implements ItemWriter> { @Autowired private BulkExportDaoSvc myBulkExportDaoSvc; - private ByteArrayOutputStream myOutputStream; - private OutputStreamWriter myWriter; + private final ByteArrayOutputStream myOutputStream; + private final OutputStreamWriter myWriter; private IParser myParser; @Value("#{stepExecutionContext['bulkExportCollectionEntityId']}") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java index 96e09ddeee5..2aebff2b539 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java @@ -15,6 +15,8 @@ import ca.uhn.fhir.jpa.batch.BatchConstants; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import 
ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer; +import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator; +import ca.uhn.fhir.jpa.batch.reader.BatchResourceSearcher; import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl; import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; @@ -38,7 +40,6 @@ import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService; import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation; import ca.uhn.fhir.jpa.dao.expunge.ExpungeService; import ca.uhn.fhir.jpa.dao.expunge.IResourceExpungeService; -import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner; import ca.uhn.fhir.jpa.dao.expunge.ResourceExpungeService; import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider; import ca.uhn.fhir.jpa.dao.index.DaoResourceLinkResolver; @@ -83,6 +84,7 @@ import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc; import ca.uhn.fhir.jpa.provider.DiffProvider; import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider; import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; +import ca.uhn.fhir.jpa.reindex.ReindexJobSubmitterImpl; import ca.uhn.fhir.jpa.sched.AutowiringSpringBeanJobFactory; import ca.uhn.fhir.jpa.sched.HapiSchedulerServiceImpl; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; @@ -120,6 +122,7 @@ import ca.uhn.fhir.jpa.search.cache.DatabaseSearchResultCacheSvcImpl; import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc; import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; +import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer; import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl; import ca.uhn.fhir.jpa.search.warm.CacheWarmingSvcImpl; import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc; @@ -135,10 +138,12 @@ import ca.uhn.fhir.jpa.validation.JpaResourceLoader; import ca.uhn.fhir.jpa.validation.ValidationSettings; import ca.uhn.fhir.rest.api.server.RequestDetails; import 
ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter; +import ca.uhn.fhir.rest.api.server.storage.IReindexJobSubmitter; import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor; import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices; import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor; import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider; +import ca.uhn.fhir.rest.server.provider.ReindexProvider; import org.hibernate.jpa.HibernatePersistenceProvider; import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -416,6 +421,16 @@ public abstract class BaseConfig { return new ResourceReindexingSvcImpl(); } + @Bean + public ResourceReindexer resourceReindexer(FhirContext theFhirContext) { + return new ResourceReindexer(theFhirContext); + } + + @Bean + public BatchResourceSearcher myBatchResourceSearcher() { + return new BatchResourceSearcher(); + } + @Bean public IStaleSearchDeletingSvc staleSearchDeletingSvc() { return new StaleSearchDeletingSvcImpl(); @@ -534,16 +549,34 @@ public abstract class BaseConfig { @Bean @Lazy - public IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter() { + public IDeleteExpungeJobSubmitter deleteExpungeJobSubmitter() { return new DeleteExpungeJobSubmitterImpl(); } + @Bean + @Lazy + public PartitionedUrlValidator partitionedUrlValidator() { + return new PartitionedUrlValidator(); + } + + @Bean + @Lazy + public IReindexJobSubmitter myReindexJobSubmitter() { + return new ReindexJobSubmitterImpl(); + } + @Bean @Lazy public DeleteExpungeProvider deleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) { return new DeleteExpungeProvider(theFhirContext, theDeleteExpungeJobSubmitter); } + @Bean + @Lazy + public ReindexProvider reindexProvider(FhirContext theFhirContext, IReindexJobSubmitter 
theReindexJobSubmitter) { + return new ReindexProvider(theFhirContext, theReindexJobSubmitter); + } + @Bean @Lazy public IBulkDataImportSvc bulkDataImportSvc() { @@ -863,11 +896,6 @@ public abstract class BaseConfig { return new DeleteExpungeService(); } - @Bean - public PartitionRunner partitionRunner(DaoConfig theDaoConfig) { - return new PartitionRunner(theDaoConfig); - } - @Bean public ResourceTableFKProvider resourceTableFKProvider() { return new ResourceTableFKProvider(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java index 7c53cb0a341..1efdf533534 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java @@ -126,8 +126,8 @@ import org.springframework.transaction.support.TransactionSynchronizationAdapter import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nullable; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import javax.annotation.PostConstruct; import javax.persistence.NoResultException; import javax.persistence.TypedQuery; @@ -588,7 +588,7 @@ public abstract class BaseHapiFhirResourceDao extends B List urlsToDeleteExpunge = Collections.singletonList(theUrl); try { - JobExecution jobExecution = myDeleteExpungeJobSubmitter.submitJob(getConfig().getExpungeBatchSize(), theRequest, urlsToDeleteExpunge); + JobExecution jobExecution = myDeleteExpungeJobSubmitter.submitJob(getConfig().getExpungeBatchSize(), urlsToDeleteExpunge, theRequest); return new DeleteMethodOutcome(createInfoOperationOutcome("Delete job submitted with id " + jobExecution.getId())); } catch (JobParametersInvalidException e) { throw new InvalidRequestException("Invalid 
Delete Expunge Request: " + e.getMessage(), e); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoObservation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoObservation.java index ae26b45e1e4..3705396ee9b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoObservation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoObservation.java @@ -27,12 +27,13 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.model.api.IQueryParameterType; -import ca.uhn.fhir.rest.api.*; -import ca.uhn.fhir.rest.api.server.*; +import ca.uhn.fhir.rest.api.SortOrderEnum; +import ca.uhn.fhir.rest.api.SortSpec; +import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import ca.uhn.fhir.rest.param.ReferenceParam; -import org.hl7.fhir.instance.model.api.*; +import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoPatientDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoPatientDstu2.java index 31375debf3f..77cb1ab0741 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoPatientDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoPatientDstu2.java @@ -74,7 +74,7 @@ public class FhirResourceDaoPatientDstu2 extends BaseHapiFhirResourceDao { @Modifying diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java index 4b678c2f9e9..25a3e3d9e1d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java @@ -54,6 +54,10 @@ public interface IResourceTableDao extends JpaRepository { @Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated ASC") Slice findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh); + // TODO in the future, consider sorting by pid as well so batch jobs process in the same order across restarts + @Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myPartitionIdValue = :partition_id ORDER BY t.myUpdated ASC") + Slice findIdsOfPartitionedResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh, @Param("partition_id") Integer theRequestPartitionId); + @Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myResourceType = :restype ORDER BY t.myUpdated ASC") Slice findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("restype") String theResourceType, @Param("low") Date theLow, @Param("high") Date theHigh); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoObservationDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoObservationDstu3.java index 490f909293a..cd6ff0da528 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoObservationDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoObservationDstu3.java @@ -31,8 +31,8 @@ import ca.uhn.fhir.rest.api.Constants; import 
ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; -import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.dstu3.model.Observation; +import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; import javax.servlet.http.HttpServletResponse; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/DeleteExpungeService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/DeleteExpungeService.java index 71d00b0ec76..21bcd2cddbb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/DeleteExpungeService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/DeleteExpungeService.java @@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome; import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao; import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao; import ca.uhn.fhir.jpa.dao.index.IdHelperService; +import ca.uhn.fhir.jpa.delete.job.DeleteExpungeProcessor; import ca.uhn.fhir.jpa.model.entity.ResourceLink; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; @@ -69,8 +70,6 @@ public class DeleteExpungeService { @Autowired private FhirContext myFhirContext; @Autowired - private PartitionRunner myPartitionRunner; - @Autowired private ResourceTableFKProvider myResourceTableFKProvider; @Autowired private IResourceLinkDao myResourceLinkDao; @@ -99,7 +98,8 @@ public class DeleteExpungeService { ourLog.info("Expunging all records linking to {} resources...", thePids.getNumber()); AtomicLong expungedEntitiesCount = new AtomicLong(); AtomicLong expungedResourcesCount = new AtomicLong(); - myPartitionRunner.runInPartitionedThreads(thePids, pidChunk -> deleteInTransaction(theResourceName, pidChunk, expungedResourcesCount, 
expungedEntitiesCount, theRequest)); + PartitionRunner partitionRunner = new PartitionRunner(DeleteExpungeProcessor.PROCESS_NAME, DeleteExpungeProcessor.THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount()); + partitionRunner.runInPartitionedThreads(thePids, pidChunk -> deleteInTransaction(theResourceName, pidChunk, expungedResourcesCount, expungedEntitiesCount, theRequest)); ourLog.info("Expunged a total of {} records", expungedEntitiesCount); IBaseOperationOutcome oo; @@ -131,7 +131,8 @@ public class DeleteExpungeService { } List conflictResourceLinks = Collections.synchronizedList(new ArrayList<>()); - myPartitionRunner.runInPartitionedThreads(theAllTargetPids, someTargetPids -> findResourceLinksWithTargetPidIn(theAllTargetPids.getContent(), someTargetPids, conflictResourceLinks)); + PartitionRunner partitionRunner = new PartitionRunner(DeleteExpungeProcessor.PROCESS_NAME, DeleteExpungeProcessor.THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount()); + partitionRunner.runInPartitionedThreads(theAllTargetPids, someTargetPids -> findResourceLinksWithTargetPidIn(theAllTargetPids.getContent(), someTargetPids, conflictResourceLinks)); if (conflictResourceLinks.isEmpty()) { return; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeOperation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeOperation.java index ad55507fe65..313267c2293 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeOperation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeOperation.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.dao.expunge; * #L% */ -import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; +import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; import 
ca.uhn.fhir.rest.api.server.RequestDetails; @@ -38,13 +38,13 @@ import java.util.concurrent.atomic.AtomicInteger; @Scope("prototype") public class ExpungeOperation implements Callable { private static final Logger ourLog = LoggerFactory.getLogger(ExpungeService.class); + public static final String PROCESS_NAME = "Expunging"; + public static final String THREAD_PREFIX = "expunge"; @Autowired private IResourceExpungeService myExpungeDaoService; @Autowired - private PartitionRunner myPartitionRunner; - @Autowired - protected IInterceptorBroadcaster myInterceptorBroadcaster; + private DaoConfig myDaoConfig; private final String myResourceName; private final Long myResourceId; @@ -113,15 +113,19 @@ public class ExpungeOperation implements Callable { private void expungeOldVersions() { Slice historicalIds = findHistoricalVersionsOfNonDeletedResources(); - myPartitionRunner.runInPartitionedThreads(historicalIds, partition -> myExpungeDaoService.expungeHistoricalVersions(myRequestDetails, partition, myRemainingCount)); + getPartitionRunner().runInPartitionedThreads(historicalIds, partition -> myExpungeDaoService.expungeHistoricalVersions(myRequestDetails, partition, myRemainingCount)); + } + + private PartitionRunner getPartitionRunner() { + return new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount()); } private void deleteCurrentVersionsOfDeletedResources(Slice theResourceIds) { - myPartitionRunner.runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeCurrentVersionOfResources(myRequestDetails, partition, myRemainingCount)); + getPartitionRunner().runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeCurrentVersionOfResources(myRequestDetails, partition, myRemainingCount)); } private void deleteHistoricalVersions(Slice theResourceIds) { - myPartitionRunner.runInPartitionedThreads(theResourceIds, partition -> 
myExpungeDaoService.expungeHistoricalVersionsOfIds(myRequestDetails, partition, myRemainingCount)); + getPartitionRunner().runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeHistoricalVersionsOfIds(myRequestDetails, partition, myRemainingCount)); } private ExpungeOutcome expungeOutcome() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java index ecd0058010f..9d6446770d0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java @@ -20,18 +20,13 @@ package ca.uhn.fhir.jpa.dao.expunge; * #L% */ -import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.util.StopWatch; import com.google.common.collect.Lists; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Slice; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Propagation; -import org.springframework.transaction.annotation.Transactional; import java.util.ArrayList; import java.util.List; @@ -46,16 +41,20 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; -@Service public class PartitionRunner { - private static final Logger ourLog = LoggerFactory.getLogger(ExpungeService.class); + private static final Logger ourLog = LoggerFactory.getLogger(PartitionRunner.class); private static final int MAX_POOL_SIZE = 1000; - private final DaoConfig myDaoConfig; + private final String myProcessName; + private final String myThreadPrefix; + private final int 
myBatchSize; + private final int myThreadCount; - @Autowired - public PartitionRunner(DaoConfig theDaoConfig) { - myDaoConfig = theDaoConfig; + public PartitionRunner(String theProcessName, String theThreadPrefix, int theBatchSize, int theThreadCount) { + myProcessName = theProcessName; + myThreadPrefix = theThreadPrefix; + myBatchSize = theBatchSize; + myThreadCount = theThreadCount; } public void runInPartitionedThreads(Slice theResourceIds, Consumer> partitionConsumer) { @@ -70,7 +69,7 @@ public class PartitionRunner { callableTasks.get(0).call(); return; } catch (Exception e) { - ourLog.error("Error while expunging.", e); + ourLog.error("Error while " + myProcessName, e); throw new InternalErrorException(e); } } @@ -83,10 +82,10 @@ public class PartitionRunner { future.get(); } } catch (InterruptedException e) { - ourLog.error("Interrupted while expunging.", e); + ourLog.error("Interrupted while " + myProcessName, e); Thread.currentThread().interrupt(); } catch (ExecutionException e) { - ourLog.error("Error while expunging.", e); + ourLog.error("Error while " + myProcessName, e); throw new InternalErrorException(e); } finally { executorService.shutdown(); @@ -96,12 +95,13 @@ public class PartitionRunner { private List> buildCallableTasks(Slice theResourceIds, Consumer> partitionConsumer) { List> retval = new ArrayList<>(); - List> partitions = Lists.partition(theResourceIds.getContent(), myDaoConfig.getExpungeBatchSize()); + ourLog.info("Splitting batch job of {} entries into chunks of {}", theResourceIds.getContent().size(), myBatchSize); + List> partitions = Lists.partition(theResourceIds.getContent(), myBatchSize); for (List nextPartition : partitions) { if (nextPartition.size() > 0) { Callable callableTask = () -> { - ourLog.info("Expunging any search results pointing to {} resources", nextPartition.size()); + ourLog.info(myProcessName + " {} resources", nextPartition.size()); partitionConsumer.accept(nextPartition); return null; }; @@ -113,24 +113,24 @@ 
public class PartitionRunner { } private ExecutorService buildExecutor(int numberOfTasks) { - int threadCount = Math.min(numberOfTasks, myDaoConfig.getExpungeThreadCount()); + int threadCount = Math.min(numberOfTasks, myThreadCount); assert (threadCount > 0); - ourLog.info("Expunging with {} threads", threadCount); + ourLog.info(myProcessName + " with {} threads", threadCount); LinkedBlockingQueue executorQueue = new LinkedBlockingQueue<>(MAX_POOL_SIZE); BasicThreadFactory threadFactory = new BasicThreadFactory.Builder() - .namingPattern("expunge-%d") + .namingPattern(myThreadPrefix + "-%d") .daemon(false) .priority(Thread.NORM_PRIORITY) .build(); RejectedExecutionHandler rejectedExecutionHandler = (theRunnable, theExecutor) -> { - ourLog.info("Note: Expunge executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size()); + ourLog.info("Note: " + myThreadPrefix + " executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size()); StopWatch sw = new StopWatch(); try { executorQueue.put(theRunnable); } catch (InterruptedException e) { throw new RejectedExecutionException("Task " + theRunnable.toString() + - " rejected from " + e.toString()); + " rejected from " + e); } ourLog.info("Slot become available after {}ms", sw.getMillis()); }; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoPatientR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoPatientR4.java index dea94ea7d5a..ad860b3de0c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoPatientR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoPatientR4.java @@ -69,7 +69,7 @@ public class FhirResourceDaoPatientR4 extends BaseHapiFhirResourceDaoim if (theId != null) { paramMap.add("_id", new StringParam(theId.getIdPart())); } - + if (!isPagingProviderDatabaseBacked(theRequest)) { 
paramMap.setLoadSynchronous(true); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoPatientR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoPatientR5.java index af1d8f6d380..c5b4d39ba70 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoPatientR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoPatientR5.java @@ -69,7 +69,7 @@ public class FhirResourceDaoPatientR5 extends BaseHapiFhirResourceDao i if (theId != null) { paramMap.add("_id", new StringParam(theId.getIdPart())); } - + if (!isPagingProviderDatabaseBacked(theRequest)) { paramMap.setLoadSynchronous(true); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteExpungeJobSubmitterImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteExpungeJobSubmitterImpl.java index 5a4fff96a7a..73e561d1dd1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteExpungeJobSubmitterImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteExpungeJobSubmitterImpl.java @@ -24,17 +24,18 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; -import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; -import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig; +import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator; +import ca.uhn.fhir.jpa.batch.job.model.RequestListJson; +import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; -import 
ca.uhn.fhir.jpa.searchparam.ResourceSearch; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter; import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import org.springframework.batch.core.Job; @@ -45,7 +46,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import javax.transaction.Transactional; -import java.util.ArrayList; import java.util.List; public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter { @@ -63,12 +63,17 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter @Autowired DaoConfig myDaoConfig; @Autowired + PartitionedUrlValidator myPartitionedUrlValidator; + @Autowired IInterceptorBroadcaster myInterceptorBroadcaster; @Override @Transactional(Transactional.TxType.NEVER) - public JobExecution submitJob(Integer theBatchSize, RequestDetails theRequest, List theUrlsToDeleteExpunge) throws JobParametersInvalidException { - List requestPartitionIds = requestPartitionIdsFromRequestAndUrls(theRequest, theUrlsToDeleteExpunge); + public JobExecution submitJob(Integer theBatchSize, List theUrlsToDeleteExpunge, RequestDetails theRequest) throws JobParametersInvalidException { + if (theBatchSize == null) { + theBatchSize = myDaoConfig.getExpungeBatchSize(); + } + RequestListJson requestListJson = myPartitionedUrlValidator.buildRequestListJson(theRequest, theUrlsToDeleteExpunge); if (!myDaoConfig.canDeleteExpunge()) { throw new ForbiddenOperationException("Delete Expunge not allowed: " + myDaoConfig.cannotDeleteExpungeReason()); } @@ -81,20 +86,7 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter 
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params); } - JobParameters jobParameters = DeleteExpungeJobConfig.buildJobParameters(theBatchSize, theUrlsToDeleteExpunge, requestPartitionIds); + JobParameters jobParameters = ReverseCronologicalBatchResourcePidReader.buildJobParameters(ProviderConstants.OPERATION_DELETE_EXPUNGE, theBatchSize, requestListJson); return myBatchJobSubmitter.runJob(myDeleteExpungeJob, jobParameters); } - - /** - * This method will throw an exception if the user is not allowed to add the requested resource type on the partition determined by the request - */ - private List requestPartitionIdsFromRequestAndUrls(RequestDetails theRequest, List theUrlsToDeleteExpunge) { - List retval = new ArrayList<>(); - for (String url : theUrlsToDeleteExpunge) { - ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url); - RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, resourceSearch.getResourceName(), null, null); - retval.add(requestPartitionId); - } - return retval; - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobConfig.java index 54a398f0331..340be4372bc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobConfig.java @@ -20,19 +20,12 @@ package ca.uhn.fhir.jpa.delete.job; * #L% */ -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig; import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener; -import 
ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader; import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter; -import ca.uhn.fhir.jpa.delete.model.RequestListJson; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; -import org.apache.commons.lang3.time.DateUtils; import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersValidator; import org.springframework.batch.core.Step; import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; @@ -43,11 +36,7 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Lazy; -import javax.annotation.Nonnull; -import java.util.Date; -import java.util.HashMap; import java.util.List; -import java.util.Map; import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME; @@ -56,9 +45,8 @@ import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME; * Delete Expunge job. 
*/ @Configuration -public class DeleteExpungeJobConfig { +public class DeleteExpungeJobConfig extends MultiUrlProcessorJobConfig { public static final String DELETE_EXPUNGE_URL_LIST_STEP_NAME = "delete-expunge-url-list-step"; - private static final int MINUTES_IN_FUTURE_TO_DELETE_FROM = 1; @Autowired private StepBuilderFactory myStepBuilderFactory; @@ -67,26 +55,13 @@ public class DeleteExpungeJobConfig { @Bean(name = DELETE_EXPUNGE_JOB_NAME) @Lazy - public Job deleteExpungeJob(FhirContext theFhirContext, MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) throws Exception { + public Job deleteExpungeJob(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) { return myJobBuilderFactory.get(DELETE_EXPUNGE_JOB_NAME) - .validator(deleteExpungeJobParameterValidator(theFhirContext, theMatchUrlService, theDaoRegistry)) + .validator(multiUrlProcessorParameterValidator(theMatchUrlService, theDaoRegistry)) .start(deleteExpungeUrlListStep()) .build(); } - @Nonnull - public static JobParameters buildJobParameters(Integer theBatchSize, List theUrlList, List theRequestPartitionIds) { - Map map = new HashMap<>(); - RequestListJson requestListJson = RequestListJson.fromUrlStringsAndRequestPartitionIds(theUrlList, theRequestPartitionIds); - map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(requestListJson.toString())); - map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MINUTES_IN_FUTURE_TO_DELETE_FROM))); - if (theBatchSize != null) { - map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue())); - } - JobParameters parameters = new JobParameters(map); - return parameters; - } - @Bean public Step deleteExpungeUrlListStep() { return myStepBuilderFactory.get(DELETE_EXPUNGE_URL_LIST_STEP_NAME) @@ -95,20 +70,17 @@ public class DeleteExpungeJobConfig { .processor(deleteExpungeProcessor()) 
.writer(sqlExecutorWriter()) .listener(pidCountRecorderListener()) - .listener(promotionListener()) + .listener(deleteExpungePromotionListener()) .build(); } @Bean - @StepScope - public PidReaderCounterListener pidCountRecorderListener() { - return new PidReaderCounterListener(); - } + public ExecutionContextPromotionListener deleteExpungePromotionListener() { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); - @Bean - @StepScope - public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() { - return new ReverseCronologicalBatchResourcePidReader(); + listener.setKeys(new String[]{SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED, PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED}); + + return listener; } @Bean @@ -116,24 +88,4 @@ public class DeleteExpungeJobConfig { public DeleteExpungeProcessor deleteExpungeProcessor() { return new DeleteExpungeProcessor(); } - - @Bean - @StepScope - public SqlExecutorWriter sqlExecutorWriter() { - return new SqlExecutorWriter(); - } - - @Bean - public JobParametersValidator deleteExpungeJobParameterValidator(FhirContext theFhirContext, MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) { - return new DeleteExpungeJobParameterValidator(theMatchUrlService, theDaoRegistry); - } - - @Bean - public ExecutionContextPromotionListener promotionListener() { - ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); - - listener.setKeys(new String[]{SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED, PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED}); - - return listener; - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeProcessor.java index a06a5d38377..969f7364961 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeProcessor.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeProcessor.java @@ -47,6 +47,9 @@ import java.util.stream.Collectors; public class DeleteExpungeProcessor implements ItemProcessor, List> { private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeProcessor.class); + public static final String PROCESS_NAME = "Delete Expunging"; + public static final String THREAD_PREFIX = "delete-expunge"; + @Autowired ResourceTableFKProvider myResourceTableFKProvider; @Autowired @@ -55,8 +58,6 @@ public class DeleteExpungeProcessor implements ItemProcessor, List process(List thePids) throws Exception { @@ -84,7 +85,8 @@ public class DeleteExpungeProcessor implements ItemProcessor, List conflictResourceLinks = Collections.synchronizedList(new ArrayList<>()); - myPartitionRunner.runInPartitionedThreads(thePids, someTargetPids -> findResourceLinksWithTargetPidIn(thePids.getContent(), someTargetPids, conflictResourceLinks)); + PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount()); + partitionRunner.runInPartitionedThreads(thePids, someTargetPids -> findResourceLinksWithTargetPidIn(thePids.getContent(), someTargetPids, conflictResourceLinks)); if (conflictResourceLinks.isEmpty()) { return; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java index 3ed5375a1e9..5adc3d15b2e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java @@ -25,10 +25,19 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import javax.persistence.*; +import 
javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; import java.io.Serializable; import java.util.Date; +@Deprecated @Entity @Table(name = "HFJ_RES_REINDEX_JOB") public class ResourceReindexJobEntity implements Serializable { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java index 14a3aaaef13..e0a2e3fc010 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java @@ -37,12 +37,12 @@ import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity; import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController; -import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.UriParam; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.FhirTerser; import ca.uhn.fhir.util.SearchParameterUtil; import com.google.common.annotations.VisibleForTesting; @@ -56,12 +56,12 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.Identifier; import org.hl7.fhir.utilities.npm.IPackageCacheManager; +import org.hl7.fhir.utilities.npm.NpmPackage; import org.slf4j.Logger; 
import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.support.TransactionTemplate; -import org.hl7.fhir.utilities.npm.NpmPackage; import javax.annotation.Nonnull; import javax.annotation.PostConstruct; @@ -227,7 +227,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { create(next, theOutcome); } catch (Exception e) { ourLog.warn("Failed to upload resource of type {} with ID {} - Error: {}", myFhirContext.getResourceType(next), next.getIdElement().getValue(), e.toString()); - throw new ImplementationGuideInstallationException(String.format("Error installing IG %s#%s: %s", name, version, e.toString()), e); + throw new ImplementationGuideInstallationException(String.format("Error installing IG %s#%s: %s", name, version, e), e); } } @@ -412,9 +412,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { List statusTypes = myFhirContext.newFhirPath().evaluate(theResource, "status", IPrimitiveType.class); if (statusTypes.size() > 0) { - if (!statusTypes.get(0).getValueAsString().equals("active")) { - return false; - } + return statusTypes.get(0).getValueAsString().equals("active"); } return true; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java index 178fe4be70c..cd6ab87dc1d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java @@ -23,7 +23,6 @@ package ca.uhn.fhir.jpa.partition; import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; -import 
ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.RequestDetails; import org.hl7.fhir.instance.model.api.IBaseResource; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java index 768cf9eef35..05c695f6b47 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java @@ -45,7 +45,17 @@ import java.util.Date; public class BaseJpaSystemProvider extends BaseJpaProvider implements IJpaSystemProvider { + /** + * @see ProviderConstants#OPERATION_REINDEX + * @deprecated + */ + @Deprecated public static final String MARK_ALL_RESOURCES_FOR_REINDEXING = "$mark-all-resources-for-reindexing"; + /** + * @see ProviderConstants#OPERATION_REINDEX + * @deprecated + */ + @Deprecated public static final String PERFORM_REINDEXING_PASS = "$perform-reindexing-pass"; private IFhirSystemDao myDao; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProviderDstu2Plus.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProviderDstu2Plus.java index 9c3b95007d8..b179bca8d48 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProviderDstu2Plus.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProviderDstu2Plus.java @@ -26,13 +26,13 @@ import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.util.ParametersUtil; -import org.hl7.fhir.dstu3.model.Bundle; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseParameters; import 
org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; import javax.servlet.http.HttpServletRequest; +import java.util.List; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -43,6 +43,11 @@ public abstract class BaseJpaSystemProviderDstu2Plus extends BaseJpaSyste @Operation(name = MARK_ALL_RESOURCES_FOR_REINDEXING, idempotent = false, returnParameters = { @OperationParam(name = "status") }) + /** + * @deprecated + * @see ca.uhn.fhir.rest.server.provider.ReindexProvider#Reindex(List, IPrimitiveType, RequestDetails) + */ + @Deprecated public IBaseResource markAllResourcesForReindexing( @OperationParam(name="type", min = 0, max = 1, typeName = "code") IPrimitiveType theType ) { @@ -65,6 +70,11 @@ public abstract class BaseJpaSystemProviderDstu2Plus extends BaseJpaSyste @Operation(name = PERFORM_REINDEXING_PASS, idempotent = false, returnParameters = { @OperationParam(name = "status") }) + /** + * @deprecated + * @see ca.uhn.fhir.rest.server.provider.ReindexProvider#Reindex(List, IPrimitiveType, RequestDetails) + */ + @Deprecated public IBaseResource performReindexingPass() { Integer count = getResourceReindexingSvc().runReindexingPass(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/ReindexJobSubmitterImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/ReindexJobSubmitterImpl.java new file mode 100644 index 00000000000..582eba93e23 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/ReindexJobSubmitterImpl.java @@ -0,0 +1,107 @@ +package ca.uhn.fhir.jpa.reindex; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.config.DaoConfig; +import ca.uhn.fhir.jpa.batch.BatchJobsConfig; +import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; +import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator; +import ca.uhn.fhir.jpa.batch.job.model.RequestListJson; +import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader; +import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.IReindexJobSubmitter; +import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersInvalidException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; + +import javax.transaction.Transactional; +import java.util.List; + +public class ReindexJobSubmitterImpl implements IReindexJobSubmitter { + @Autowired + PartitionedUrlValidator myPartitionedUrlValidator; + @Autowired + DaoConfig myDaoConfig; + @Autowired + private ISearchParamRegistry mySearchParamRegistry; + @Autowired + private IBatchJobSubmitter myBatchJobSubmitter; + @Autowired + 
@Qualifier(BatchJobsConfig.REINDEX_JOB_NAME) + private Job myReindexJob; + @Autowired + @Qualifier(BatchJobsConfig.REINDEX_EVERYTHING_JOB_NAME) + private Job myReindexEverythingJob; + + @Override + @Transactional(Transactional.TxType.NEVER) + public JobExecution submitJob(Integer theBatchSize, List theUrlsToReindex, RequestDetails theRequest) throws JobParametersInvalidException { + if (theBatchSize == null) { + theBatchSize = myDaoConfig.getReindexBatchSize(); + } + RequestListJson requestListJson = myPartitionedUrlValidator.buildRequestListJson(theRequest, theUrlsToReindex); + if (!myDaoConfig.isReindexEnabled()) { + throw new ForbiddenOperationException("Reindexing is disabled on this server."); + } + + /* + * On the first time we run a particular reindex job, let's make sure we + * have the latest search parameters loaded. A common reason to + * be reindexing is that the search parameters have changed in some way, so + * this makes sure we're on the latest versions + */ + mySearchParamRegistry.forceRefresh(); + + JobParameters jobParameters = ReverseCronologicalBatchResourcePidReader.buildJobParameters(ProviderConstants.OPERATION_REINDEX, theBatchSize, requestListJson); + return myBatchJobSubmitter.runJob(myReindexJob, jobParameters); + } + + @Override + @Transactional(Transactional.TxType.NEVER) + public JobExecution submitEverythingJob(Integer theBatchSize, RequestDetails theRequest) throws JobParametersInvalidException { + if (theBatchSize == null) { + theBatchSize = myDaoConfig.getReindexBatchSize(); + } + RequestPartitionId requestPartitionId = myPartitionedUrlValidator.requestPartitionIdFromRequest(theRequest); + if (!myDaoConfig.isReindexEnabled()) { + throw new ForbiddenOperationException("Reindexing is disabled on this server."); + } + + /* + * On the first time we run a particular reindex job, let's make sure we + * have the latest search parameters loaded. 
A common reason to + * be reindexing is that the search parameters have changed in some way, so + * this makes sure we're on the latest versions + */ + mySearchParamRegistry.forceRefresh(); + + JobParameters jobParameters = CronologicalBatchAllResourcePidReader.buildJobParameters(theBatchSize, requestPartitionId); + return myBatchJobSubmitter.runJob(myReindexEverythingJob, jobParameters); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexEverythingJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexEverythingJobConfig.java new file mode 100644 index 00000000000..b0a0b77a642 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexEverythingJobConfig.java @@ -0,0 +1,98 @@ +package ca.uhn.fhir.jpa.reindex.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener; +import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.listener.ExecutionContextPromotionListener; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Lazy; + +import java.util.List; + +import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.REINDEX_EVERYTHING_JOB_NAME; + +/** + * Spring batch Job configuration file. Contains all necessary plumbing to run a + * Reindex job. 
+ */ +@Configuration +public class ReindexEverythingJobConfig { + public static final String REINDEX_EVERYTHING_STEP_NAME = "reindex-everything-step"; + + @Autowired + private StepBuilderFactory myStepBuilderFactory; + @Autowired + private JobBuilderFactory myJobBuilderFactory; + + @Bean(name = REINDEX_EVERYTHING_JOB_NAME) + @Lazy + public Job reindexJob() { + return myJobBuilderFactory.get(REINDEX_EVERYTHING_JOB_NAME) + .start(reindexEverythingStep()) + .build(); + } + + @Bean + public Step reindexEverythingStep() { + return myStepBuilderFactory.get(REINDEX_EVERYTHING_STEP_NAME) + ., List>chunk(1) + .reader(cronologicalBatchAllResourcePidReader()) + .writer(reindexWriter()) + .listener(reindexEverythingPidCountRecorderListener()) + .listener(reindexEverythingPromotionListener()) + .build(); + } + + @Bean + @StepScope + public CronologicalBatchAllResourcePidReader cronologicalBatchAllResourcePidReader() { + return new CronologicalBatchAllResourcePidReader(); + } + + @Bean + @StepScope + public ReindexWriter reindexWriter() { + return new ReindexWriter(); + } + + @Bean + @StepScope + public PidReaderCounterListener reindexEverythingPidCountRecorderListener() { + return new PidReaderCounterListener(); + } + + @Bean + public ExecutionContextPromotionListener reindexEverythingPromotionListener() { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + + listener.setKeys(new String[]{PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED}); + + return listener; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexJobConfig.java new file mode 100644 index 00000000000..9b5bf617332 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexJobConfig.java @@ -0,0 +1,89 @@ +package ca.uhn.fhir.jpa.reindex.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 
2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig; +import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener; +import ca.uhn.fhir.jpa.searchparam.MatchUrlService; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.listener.ExecutionContextPromotionListener; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Lazy; + +import java.util.List; + +import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.REINDEX_JOB_NAME; + +/** + * Spring batch Job configuration file. Contains all necessary plumbing to run a + * Reindex job. 
+ */ +@Configuration +public class ReindexJobConfig extends MultiUrlProcessorJobConfig { + public static final String REINDEX_URL_LIST_STEP_NAME = "reindex-url-list-step"; + + @Autowired + private StepBuilderFactory myStepBuilderFactory; + @Autowired + private JobBuilderFactory myJobBuilderFactory; + + @Bean(name = REINDEX_JOB_NAME) + @Lazy + public Job reindexJob(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) { + return myJobBuilderFactory.get(REINDEX_JOB_NAME) + .validator(multiUrlProcessorParameterValidator(theMatchUrlService, theDaoRegistry)) + .start(reindexUrlListStep()) + .build(); + } + + @Bean + public Step reindexUrlListStep() { + return myStepBuilderFactory.get(REINDEX_URL_LIST_STEP_NAME) + ., List>chunk(1) + .reader(reverseCronologicalBatchResourcePidReader()) + .writer(reindexWriter()) + .listener(pidCountRecorderListener()) + .listener(reindexPromotionListener()) + .build(); + } + + @Bean + @StepScope + public ReindexWriter reindexWriter() { + return new ReindexWriter(); + } + + @Bean + public ExecutionContextPromotionListener reindexPromotionListener() { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + + listener.setKeys(new String[]{PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED}); + + return listener; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexWriter.java new file mode 100644 index 00000000000..5f1ee6b91ee --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/job/ReindexWriter.java @@ -0,0 +1,67 @@ +package ca.uhn.fhir.jpa.reindex.job; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.jpa.api.config.DaoConfig; +import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner; +import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.SliceImpl; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionTemplate; + +import java.util.List; + +/** + * Reindex the provided list of pids of resources + */ + +public class ReindexWriter implements ItemWriter> { + private static final Logger ourLog = LoggerFactory.getLogger(ReindexWriter.class); + + public static final String PROCESS_NAME = "Reindexing"; + public static final String THREAD_PREFIX = "reindex"; + + @Autowired + ResourceReindexer myResourceReindexer; + @Autowired + DaoConfig myDaoConfig; + @Autowired + protected PlatformTransactionManager myTxManager; + + @Override + public void write(List> thePidLists) throws Exception { + PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getReindexBatchSize(), myDaoConfig.getReindexThreadCount()); + + // Note that since our chunk size is 1, there will always be exactly one list + for (List pidList : thePidLists) { + partitionRunner.runInPartitionedThreads(new SliceImpl<>(pidList), pids -> reindexPids(pidList)); + } + } + + private void reindexPids(List pidList) { + TransactionTemplate txTemplate = 
new TransactionTemplate(myTxManager); + txTemplate.executeWithoutResult(t -> pidList.forEach(pid -> myResourceReindexer.readAndReindexResourceByPid(pid))); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index ebff3054e7f..7fde597b8e3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -57,14 +57,11 @@ import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor; import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory; import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; -import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; +import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; -import ca.uhn.fhir.rest.api.SearchContainedModeEnum; import ca.uhn.fhir.jpa.util.BaseIterator; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; -import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import ca.uhn.fhir.jpa.util.QueryChunker; import ca.uhn.fhir.jpa.util.SqlQueryList; import ca.uhn.fhir.model.api.IQueryParameterType; @@ -75,6 +72,7 @@ import ca.uhn.fhir.model.primitive.InstantDt; import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; +import ca.uhn.fhir.rest.api.SearchContainedModeEnum; import ca.uhn.fhir.rest.api.SortOrderEnum; import ca.uhn.fhir.rest.api.SortSpec; import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; @@ -85,6 +83,8 @@ import ca.uhn.fhir.rest.param.ReferenceParam; import 
ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; +import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.StringUtil; import ca.uhn.fhir.util.UrlUtil; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java new file mode 100644 index 00000000000..8c941db4cf6 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java @@ -0,0 +1,102 @@ +package ca.uhn.fhir.jpa.search.reindex; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.dao.data.IForcedIdDao; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; +import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; +import ca.uhn.fhir.jpa.model.entity.ForcedId; +import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import static org.apache.commons.lang3.StringUtils.isBlank; + +@Service +public class ResourceReindexer { + private static final Logger ourLog = LoggerFactory.getLogger(ResourceReindexer.class); + @Autowired + private IResourceHistoryTableDao myResourceHistoryTableDao; + @Autowired + private IForcedIdDao myForcedIdDao; + @Autowired + private IResourceTableDao myResourceTableDao; + @Autowired + private DaoRegistry myDaoRegistry; + + private final FhirContext myFhirContext; + + public ResourceReindexer(FhirContext theFhirContext) { + myFhirContext = theFhirContext; + } + + public void readAndReindexResourceByPid(Long theResourcePid) { + ResourceTable resourceTable = myResourceTableDao.findById(theResourcePid).orElseThrow(IllegalStateException::new); + reindexResourceEntity(resourceTable); + } + + public void reindexResourceEntity(ResourceTable theResourceTable) { + /* + * This part is because from HAPI 1.5 - 1.6 we changed the format of forced ID to be "type/id" instead of just "id" + */ + ForcedId forcedId = theResourceTable.getForcedId(); + if (forcedId != null) { + if (isBlank(forcedId.getResourceType())) { + ourLog.info("Updating resource {} forcedId 
type to {}", forcedId.getForcedId(), theResourceTable.getResourceType()); + forcedId.setResourceType(theResourceTable.getResourceType()); + myForcedIdDao.save(forcedId); + } + } + + IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResourceTable.getResourceType()); + long expectedVersion = theResourceTable.getVersion(); + IBaseResource resource = dao.readByPid(new ResourcePersistentId(theResourceTable.getId()), true); + + if (resource == null) { + throw new InternalErrorException("Could not find resource version " + theResourceTable.getIdDt().toUnqualified().getValue() + " in database"); + } + + Long actualVersion = resource.getIdElement().getVersionIdPartAsLong(); + if (actualVersion < expectedVersion) { + ourLog.warn("Resource {} version {} does not exist, renumbering version {}", resource.getIdElement().toUnqualifiedVersionless().getValue(), resource.getIdElement().getVersionIdPart(), expectedVersion); + myResourceHistoryTableDao.updateVersion(theResourceTable.getId(), actualVersion, expectedVersion); + } + + doReindex(theResourceTable, resource); + } + + @SuppressWarnings("unchecked") + void doReindex(ResourceTable theResourceTable, T theResource) { + RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(theResource.getClass()); + Class resourceClass = (Class) resourceDefinition.getImplementingClass(); + final IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceClass); + dao.reindex(theResource, theResourceTable); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java index fe971407069..124a0a50c90 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java @@ -21,34 +21,28 @@ package 
ca.uhn.fhir.jpa.search.reindex; */ import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; import ca.uhn.fhir.jpa.dao.data.IForcedIdDao; -import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity; -import ca.uhn.fhir.jpa.model.entity.ForcedId; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.sched.HapiJob; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; -import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.parser.DataFormatException; -import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.StopWatch; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.apache.commons.lang3.time.DateUtils; -import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.InstantType; +import org.jetbrains.annotations.Nullable; import org.quartz.JobExecutionContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -80,9 +74,13 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; -import static org.apache.commons.lang3.StringUtils.isBlank; import static 
org.apache.commons.lang3.StringUtils.isNotBlank; +/** + * @see ca.uhn.fhir.jpa.reindex.job.ReindexJobConfig + * @deprecated + */ +@Deprecated public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { private static final Date BEGINNING_OF_TIME = new Date(0); @@ -96,13 +94,11 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { @Autowired private PlatformTransactionManager myTxManager; private TransactionTemplate myTxTemplate; - private ThreadFactory myReindexingThreadFactory = new BasicThreadFactory.Builder().namingPattern("ResourceReindex-%d").build(); + private final ThreadFactory myReindexingThreadFactory = new BasicThreadFactory.Builder().namingPattern("ResourceReindex-%d").build(); private ThreadPoolExecutor myTaskExecutor; @Autowired private IResourceTableDao myResourceTableDao; @Autowired - private IResourceHistoryTableDao myResourceHistoryTableDao; - @Autowired private DaoRegistry myDaoRegistry; @Autowired private IForcedIdDao myForcedIdDao; @@ -114,47 +110,19 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { private ISearchParamRegistry mySearchParamRegistry; @Autowired private ISchedulerService mySchedulerService; - - @VisibleForTesting - void setReindexJobDaoForUnitTest(IResourceReindexJobDao theReindexJobDao) { - myReindexJobDao = theReindexJobDao; - } + @Autowired + private ResourceReindexer myResourceReindexer; @VisibleForTesting void setDaoConfigForUnitTest(DaoConfig theDaoConfig) { myDaoConfig = theDaoConfig; } - @VisibleForTesting - void setTxManagerForUnitTest(PlatformTransactionManager theTxManager) { - myTxManager = theTxManager; - } - - @VisibleForTesting - void setResourceTableDaoForUnitTest(IResourceTableDao theResourceTableDao) { - myResourceTableDao = theResourceTableDao; - } - - @VisibleForTesting - void setDaoRegistryForUnitTest(DaoRegistry theDaoRegistry) { - myDaoRegistry = theDaoRegistry; - } - - @VisibleForTesting - void setForcedIdDaoForUnitTest(IForcedIdDao 
theForcedIdDao) { - myForcedIdDao = theForcedIdDao; - } - @VisibleForTesting void setContextForUnitTest(FhirContext theContext) { myContext = theContext; } - @VisibleForTesting - void setSchedulerServiceForUnitTest(ISchedulerService theSchedulerService) { - mySchedulerService = theSchedulerService; - } - @PostConstruct public void start() { myTxTemplate = new TransactionTemplate(myTxManager); @@ -173,6 +141,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { rejectHandler ); } + /** * A handler for rejected tasks that will have the caller block until space is available. * This was stolen from old hibernate search(5.X.X), as it has been removed in HS6. We can probably come up with a better solution though. @@ -189,8 +158,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { public void rejectedExecution(Runnable r, ThreadPoolExecutor e) { try { e.getQueue().put( r ); - } - catch (InterruptedException e1) { + } catch (InterruptedException e1) { ourLog.error("Interrupted Execption for task: {}",r, e1 ); Thread.currentThread().interrupt(); } @@ -289,15 +257,15 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { ourLog.info("Cancelling and purging all resource reindexing jobs"); myIndexingLock.lock(); try { - myTxTemplate.execute(t -> { - myReindexJobDao.markAllOfTypeAsDeleted(); - return null; - }); + myTxTemplate.execute(t -> { + myReindexJobDao.markAllOfTypeAsDeleted(); + return null; + }); - myTaskExecutor.shutdown(); - initExecutor(); + myTaskExecutor.shutdown(); + initExecutor(); - expungeJobsMarkedAsDeleted(); + expungeJobsMarkedAsDeleted(); } finally { myIndexingLock.unlock(); } @@ -346,8 +314,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { } @VisibleForTesting - void setSearchParamRegistryForUnitTest(ISearchParamRegistry theSearchParamRegistry) { - mySearchParamRegistry = theSearchParamRegistry; + public void 
setResourceReindexerForUnitTest(ResourceReindexer theResourceReindexer) { + myResourceReindexer = theResourceReindexer; } private int runReindexJob(ResourceReindexJobEntity theJob) { @@ -387,7 +355,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { }); Validate.notNull(range); int count = range.getNumberOfElements(); - ourLog.info("Loaded {} resources for reindexing in {}", count, pageSw.toString()); + ourLog.info("Loaded {} resources for reindexing in {}", count, pageSw); // If we didn't find any results at all, mark as deleted if (count == 0) { @@ -446,7 +414,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { return null; }); - ourLog.info("Completed pass of reindex JOB[{}] - Indexed {} resources in {} ({} / sec) - Have indexed until: {}", theJob.getId(), count, sw.toString(), sw.formatThroughput(count, TimeUnit.SECONDS), new InstantType(newLow)); + ourLog.info("Completed pass of reindex JOB[{}] - Indexed {} resources in {} ({} / sec) - Have indexed until: {}", theJob.getId(), count, sw, sw.formatThroughput(count, TimeUnit.SECONDS), new InstantType(newLow)); return counter.get(); } @@ -465,7 +433,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); txTemplate.execute((TransactionCallback) theStatus -> { - ourLog.info("Marking resource with PID {} as indexing_failed", new Object[]{theId}); + ourLog.info("Marking resource with PID {} as indexing_failed", theId); myResourceTableDao.updateIndexStatus(theId, BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED); @@ -492,7 +460,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamQuantityNormalized t WHERE t.myResourcePid = :id"); q.setParameter("id", theId); q.executeUpdate(); - + q = 
myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResourcePid = :id"); q.setParameter("id", theId); q.executeUpdate(); @@ -527,63 +495,12 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { myCounter = theCounter; } - - @SuppressWarnings("unchecked") - private void doReindex(ResourceTable theResourceTable, T theResource) { - RuntimeResourceDefinition resourceDefinition = myContext.getResourceDefinition(theResource.getClass()); - Class resourceClass = (Class) resourceDefinition.getImplementingClass(); - final IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceClass); - dao.reindex(theResource, theResourceTable); - - myCounter.incrementAndGet(); - } - @Override public Date call() { Throwable reindexFailure; + try { - reindexFailure = myTxTemplate.execute(t -> { - ResourceTable resourceTable = myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new); - myUpdated = resourceTable.getUpdatedDate(); - - try { - /* - * This part is because from HAPI 1.5 - 1.6 we changed the format of forced ID to be "type/id" instead of just "id" - */ - ForcedId forcedId = resourceTable.getForcedId(); - if (forcedId != null) { - if (isBlank(forcedId.getResourceType())) { - ourLog.info("Updating resource {} forcedId type to {}", forcedId.getForcedId(), resourceTable.getResourceType()); - forcedId.setResourceType(resourceTable.getResourceType()); - myForcedIdDao.save(forcedId); - } - } - - IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceTable.getResourceType()); - long expectedVersion = resourceTable.getVersion(); - IBaseResource resource = dao.readByPid(new ResourcePersistentId(resourceTable.getId()), true); - - if (resource == null) { - throw new InternalErrorException("Could not find resource version " + resourceTable.getIdDt().toUnqualified().getValue() + " in database"); - } - - Long actualVersion = resource.getIdElement().getVersionIdPartAsLong(); - if (actualVersion < 
expectedVersion) { - ourLog.warn("Resource {} version {} does not exist, renumbering version {}", resource.getIdElement().toUnqualifiedVersionless().getValue(), resource.getIdElement().getVersionIdPart(), expectedVersion); - myResourceHistoryTableDao.updateVersion(resourceTable.getId(), actualVersion, expectedVersion); - } - - doReindex(resourceTable, resource); - - return null; - - } catch (Exception e) { - ourLog.error("Failed to index resource {}: {}", resourceTable.getIdDt(), e.toString(), e); - t.setRollbackOnly(); - return e; - } - }); - + reindexFailure = readResourceAndReindex(); } catch (ResourceVersionConflictException e) { /* * We reindex in multiple threads, so it's technically possible that two threads try @@ -603,5 +520,26 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { return myUpdated; } + + @Nullable + private Throwable readResourceAndReindex() { + Throwable reindexFailure; + reindexFailure = myTxTemplate.execute(t -> { + ResourceTable resourceTable = myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new); + myUpdated = resourceTable.getUpdatedDate(); + + try { + myResourceReindexer.reindexResourceEntity(resourceTable); + myCounter.incrementAndGet(); + return null; + + } catch (Exception e) { + ourLog.error("Failed to index resource {}: {}", resourceTable.getIdDt(), e, e); + t.setRollbackOnly(); + return e; + } + }); + return reindexFailure; + } } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterUtil.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterUtil.java new file mode 100644 index 00000000000..018d5393805 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterUtil.java @@ -0,0 +1,29 @@ +package ca.uhn.fhir.jpa.batch.job; + +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl; +import 
ca.uhn.fhir.jpa.batch.job.model.RequestListJson; +import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import org.springframework.batch.core.JobParameters; + +import javax.annotation.Nonnull; +import java.util.ArrayList; +import java.util.List; + +public final class MultiUrlJobParameterUtil { + private MultiUrlJobParameterUtil() { + } + + @Nonnull + public static JobParameters buildJobParameters(String... theUrls) { + List partitionedUrls = new ArrayList<>(); + for (String url : theUrls) { + partitionedUrls.add(new PartitionedUrl(url, RequestPartitionId.defaultPartition())); + } + + RequestListJson requestListJson = new RequestListJson(); + requestListJson.setPartitionedUrls(partitionedUrls); + return ReverseCronologicalBatchResourcePidReader.buildJobParameters(ProviderConstants.OPERATION_REINDEX, 2401, requestListJson); + } +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/reader/BatchDateThresholdUpdaterTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/reader/BatchDateThresholdUpdaterTest.java new file mode 100644 index 00000000000..36ef7692fd8 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/reader/BatchDateThresholdUpdaterTest.java @@ -0,0 +1,125 @@ +package ca.uhn.fhir.jpa.batch.reader; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.jupiter.api.Assertions.assertEquals; + +@ExtendWith(MockitoExtension.class) +class BatchDateThresholdUpdaterTest { + 
static Date LATE_DATE = new Date(); + static Date EARLY_DATE = new Date(LATE_DATE.getTime() - 1000); + static Long PID1 = 1L; + static Long PID2 = 2L; + static Long PID3 = 3L; + BatchDateThresholdUpdater mySvc = new BatchDateThresholdUpdater(); + + @Test + public void testEmptyList() { + Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, Collections.emptySet(), Collections.emptyList()); + assertEquals(LATE_DATE, newThreshold); + } + + @Test + public void oneItem() { + mySvc.setDateFromPid(pid -> LATE_DATE); + Set seenPids = new HashSet<>(); + Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Collections.singletonList(PID1)); + assertEquals(LATE_DATE, newThreshold); + assertThat(seenPids, contains(PID1)); + } + + @Test + public void twoItemsSameDate() { + mySvc.setDateFromPid(pid -> LATE_DATE); + Set seenPids = new HashSet<>(); + Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2)); + assertEquals(LATE_DATE, newThreshold); + assertThat(seenPids, contains(PID1, PID2)); + } + + @Test + public void twoItemsDiffDate() { + List dates = Arrays.asList(EARLY_DATE, LATE_DATE); + mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1)); + Set seenPids = new HashSet<>(); + Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2)); + assertEquals(LATE_DATE, newThreshold); + assertThat(seenPids, contains(PID2)); + } + + @Test + public void threeItemsSameDate() { + mySvc.setDateFromPid(pid -> LATE_DATE); + Set seenPids = new HashSet<>(); + Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3)); + assertEquals(LATE_DATE, newThreshold); + assertThat(seenPids, contains(PID1, PID2, PID3)); + } + + @Test + public void threeItemsDifferentEEL() { + List dates = Arrays.asList(EARLY_DATE, EARLY_DATE, LATE_DATE); + mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1)); + Set seenPids = new HashSet<>(); + Date 
newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3)); + assertEquals(LATE_DATE, newThreshold); + assertThat(seenPids, contains(PID3)); + } + + @Test + public void threeItemsDifferentELL() { + List dates = Arrays.asList(EARLY_DATE, LATE_DATE, LATE_DATE); + mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1)); + Set seenPids = new HashSet<>(); + Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3)); + assertEquals(LATE_DATE, newThreshold); + assertThat(seenPids, contains(PID2, PID3)); + } + + + @Test + public void threeItemsDifferentLEE() { + List dates = Arrays.asList(LATE_DATE, EARLY_DATE, EARLY_DATE); + mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1)); + Set seenPids = new HashSet<>(); + Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3)); + assertEquals(EARLY_DATE, newThreshold); + assertThat(seenPids, contains(PID2, PID3)); + } + + @Test + public void threeItemsDifferentLLE() { + List dates = Arrays.asList(LATE_DATE, LATE_DATE, EARLY_DATE); + mySvc.setDateFromPid(pid -> dates.get(pid.intValue() - 1)); + Set seenPids = new HashSet<>(); + Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, Arrays.asList(PID1, PID2, PID3)); + assertEquals(EARLY_DATE, newThreshold); + assertThat(seenPids, contains(PID3)); + } + + @Test + public void oneHundredItemsSameDate() { + mySvc.setDateFromPid(pid -> LATE_DATE); + Set seenPids = new HashSet<>(); + List bigList = new ArrayList<>(); + for (int i = 0; i < 100; ++i) { + bigList.add((long) i); + } + Date newThreshold = mySvc.updateThresholdAndCache(LATE_DATE, seenPids, bigList); + assertEquals(LATE_DATE, newThreshold); + assertThat(seenPids, hasSize(100)); + } +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/reader/ReverseCronologicalBatchResourcePidReaderTest.java 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/reader/ReverseCronologicalBatchResourcePidReaderTest.java index 59b38338df9..6f0e0db249d 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/reader/ReverseCronologicalBatchResourcePidReaderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/batch/reader/ReverseCronologicalBatchResourcePidReaderTest.java @@ -5,8 +5,9 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; -import ca.uhn.fhir.jpa.delete.model.PartitionedUrl; -import ca.uhn.fhir.jpa.delete.model.RequestListJson; +import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl; +import ca.uhn.fhir.jpa.batch.job.model.RequestListJson; +import ca.uhn.fhir.jpa.dao.IResultIterator; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.ResourceSearch; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -39,6 +40,7 @@ import static org.mockito.Mockito.when; @ExtendWith(MockitoExtension.class) class ReverseCronologicalBatchResourcePidReaderTest { + private static final int BATCH_SIZE = 3; static FhirContext ourFhirContext = FhirContext.forR4Cached(); static String URL_A = "a"; static String URL_B = "b"; @@ -54,9 +56,14 @@ class ReverseCronologicalBatchResourcePidReaderTest { DaoRegistry myDaoRegistry; @Mock IFhirResourceDao myPatientDao; + private final RequestPartitionId myDefaultPartitionId = RequestPartitionId.defaultPartition(); + @Mock + private IResultIterator myResultIter; @InjectMocks ReverseCronologicalBatchResourcePidReader myReader = new ReverseCronologicalBatchResourcePidReader(); + @Mock + private BatchResourceSearcher myBatchResourceSearcher; @BeforeEach public void before() throws JsonProcessingException { @@ -65,17 +72,20 @@ class ReverseCronologicalBatchResourcePidReaderTest { ObjectMapper mapper = new 
ObjectMapper(); String requestListJsonString = mapper.writeValueAsString(requestListJson); myReader.setRequestListJson(requestListJsonString); + myReader.setBatchSize(BATCH_SIZE); SearchParameterMap map = new SearchParameterMap(); RuntimeResourceDefinition patientResDef = ourFhirContext.getResourceDefinition("Patient"); - when(myMatchUrlService.getResourceSearch(URL_A)).thenReturn(new ResourceSearch(patientResDef, map)); - when(myMatchUrlService.getResourceSearch(URL_B)).thenReturn(new ResourceSearch(patientResDef, map)); - when(myMatchUrlService.getResourceSearch(URL_C)).thenReturn(new ResourceSearch(patientResDef, map)); + when(myMatchUrlService.getResourceSearch(URL_A, myDefaultPartitionId)).thenReturn(new ResourceSearch(patientResDef, map, myDefaultPartitionId)); + when(myMatchUrlService.getResourceSearch(URL_B, myDefaultPartitionId)).thenReturn(new ResourceSearch(patientResDef, map, myDefaultPartitionId)); + when(myMatchUrlService.getResourceSearch(URL_C, myDefaultPartitionId)).thenReturn(new ResourceSearch(patientResDef, map, myDefaultPartitionId)); when(myDaoRegistry.getResourceDao("Patient")).thenReturn(myPatientDao); myPatient = new Patient(); when(myPatientDao.readByPid(any())).thenReturn(myPatient); Calendar cal = new GregorianCalendar(2021, 1, 1); myPatient.getMeta().setLastUpdated(cal.getTime()); + + when(myBatchResourceSearcher.performSearch(any(), any())).thenReturn(myResultIter); } private Set buildPidSet(Integer... 
thePids) { @@ -87,7 +97,7 @@ class ReverseCronologicalBatchResourcePidReaderTest { @Test public void test3x1() throws Exception { - when(myPatientDao.searchForIds(any(), any())) + when(myResultIter.getNextResultBatch(BATCH_SIZE)) .thenReturn(buildPidSet(1, 2, 3)) .thenReturn(emptySet) .thenReturn(buildPidSet(4, 5, 6)) @@ -101,10 +111,30 @@ class ReverseCronologicalBatchResourcePidReaderTest { assertNull(myReader.read()); } + @Test + public void testReadRepeat() throws Exception { + when(myResultIter.getNextResultBatch(BATCH_SIZE)) + .thenReturn(buildPidSet(1, 2, 3)) + .thenReturn(buildPidSet(1, 2, 3)) + .thenReturn(buildPidSet(2, 3, 4)) + .thenReturn(buildPidSet(4, 5)) + .thenReturn(emptySet); + + when(myResultIter.hasNext()) + .thenReturn(true) + .thenReturn(true) + .thenReturn(true) + .thenReturn(true) + .thenReturn(false); + + assertListEquals(myReader.read(), 1, 2, 3); + assertListEquals(myReader.read(), 4, 5); + assertNull(myReader.read()); + } @Test public void test1x3start() throws Exception { - when(myPatientDao.searchForIds(any(), any())) + when(myResultIter.getNextResultBatch(BATCH_SIZE)) .thenReturn(buildPidSet(1, 2, 3)) .thenReturn(buildPidSet(4, 5, 6)) .thenReturn(buildPidSet(7, 8)) @@ -120,7 +150,7 @@ class ReverseCronologicalBatchResourcePidReaderTest { @Test public void test1x3end() throws Exception { - when(myPatientDao.searchForIds(any(), any())) + when(myResultIter.getNextResultBatch(BATCH_SIZE)) .thenReturn(emptySet) .thenReturn(emptySet) .thenReturn(buildPidSet(1, 2, 3)) @@ -140,6 +170,4 @@ class ReverseCronologicalBatchResourcePidReaderTest { assertEquals(theList.get(i), Long.valueOf(theValues[i])); } } - - } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java index 95f2a255270..df9f2cefed2 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java @@ -35,7 +35,6 @@ import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider; import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc; import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc; import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; -import ca.uhn.fhir.jpa.stresstest.GiantTransactionPerfTest; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry; import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; @@ -110,6 +109,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.when; @TestPropertySource(properties = { @@ -240,8 +240,9 @@ public abstract class BaseJpaTest extends BaseTest { when(mySrd.getInterceptorBroadcaster()).thenReturn(mySrdInterceptorService); when(mySrd.getUserData()).thenReturn(new HashMap<>()); when(mySrd.getHeaders(eq(JpaConstants.HEADER_META_SNAPSHOT_MODE))).thenReturn(new ArrayList<>()); - when(mySrd.getServer().getDefaultPageSize()).thenReturn(null); - when(mySrd.getServer().getMaximumPageSize()).thenReturn(null); + // TODO enforce strict mocking everywhere + lenient().when(mySrd.getServer().getDefaultPageSize()).thenReturn(null); + lenient().when(mySrd.getServer().getMaximumPageSize()).thenReturn(null); } protected CountDownLatch registerLatchHookInterceptor(int theCount, Pointcut theLatchPointcut) { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunnerTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunnerTest.java index 9cd49c8544d..4518cfc9a2a 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunnerTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunnerTest.java @@ -2,20 +2,15 @@ package ca.uhn.fhir.jpa.dao.expunge; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.jpa.api.config.DaoConfig; -import ca.uhn.fhir.jpa.config.TestDstu3Config; import ca.uhn.test.concurrency.PointcutLatch; import com.google.common.collect.Sets; import org.apache.commons.lang3.builder.ToStringBuilder; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Slice; import org.springframework.data.domain.SliceImpl; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit.jupiter.SpringExtension; import java.util.ArrayList; import java.util.List; @@ -27,24 +22,15 @@ import static org.hamcrest.Matchers.isOneOf; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes = {TestDstu3Config.class}) public class PartitionRunnerTest { private static final Logger ourLog = LoggerFactory.getLogger(PartitionRunnerTest.class); - private static final String EXPUNGE_THREADNAME_1 = "expunge-1"; - private static final String EXPUNGE_THREADNAME_2 = "expunge-2"; + private static final String TEST_THREADNAME_1 = "test-1"; + private static final String TEST_THREADNAME_2 = "test-2"; - @Autowired - private PartitionRunner myPartitionRunner; - - @Autowired - private DaoConfig myDaoConfig; - private PointcutLatch myLatch = new PointcutLatch("partition call"); + private final PointcutLatch myLatch = new PointcutLatch("partition call"); @AfterEach public void 
before() { - myDaoConfig.setExpungeThreadCount(new DaoConfig().getExpungeThreadCount()); - myDaoConfig.setExpungeBatchSize(new DaoConfig().getExpungeBatchSize()); myLatch.clear(); } @@ -53,10 +39,23 @@ public class PartitionRunnerTest { Slice resourceIds = buildSlice(0); Consumer> partitionConsumer = buildPartitionConsumer(myLatch); myLatch.setExpectedCount(0); - myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer); + + getPartitionRunner().runInPartitionedThreads(resourceIds, partitionConsumer); myLatch.clear(); } + private PartitionRunner getPartitionRunner() { + return getPartitionRunner(DaoConfig.DEFAULT_EXPUNGE_BATCH_SIZE); + } + + private PartitionRunner getPartitionRunner(int theBatchSize) { + return getPartitionRunner(theBatchSize, Runtime.getRuntime().availableProcessors()); + } + + private PartitionRunner getPartitionRunner(int theBatchSize, int theThreadCount) { + return new PartitionRunner("TEST", "test", theBatchSize, theThreadCount); + } + private Slice buildSlice(int size) { List list = new ArrayList<>(); for (long i = 0; i < size; ++i) { @@ -71,7 +70,7 @@ public class PartitionRunnerTest { Consumer> partitionConsumer = buildPartitionConsumer(myLatch); myLatch.setExpectedCount(1); - myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer); + getPartitionRunner().runInPartitionedThreads(resourceIds, partitionConsumer); PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(myLatch.awaitExpected()); assertEquals("main", partitionCall.threadName); assertEquals(1, partitionCall.size); @@ -84,7 +83,7 @@ public class PartitionRunnerTest { Consumer> partitionConsumer = buildPartitionConsumer(myLatch); myLatch.setExpectedCount(1); - myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer); + getPartitionRunner().runInPartitionedThreads(resourceIds, partitionConsumer); PartitionCall partitionCall = (PartitionCall) 
PointcutLatch.getLatchInvocationParameter(myLatch.awaitExpected()); assertEquals("main", partitionCall.threadName); assertEquals(2, partitionCall.size); @@ -93,17 +92,16 @@ public class PartitionRunnerTest { @Test public void tenItemsBatch5() throws InterruptedException { Slice resourceIds = buildSlice(10); - myDaoConfig.setExpungeBatchSize(5); Consumer> partitionConsumer = buildPartitionConsumer(myLatch); myLatch.setExpectedCount(2); - myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer); + getPartitionRunner(5).runInPartitionedThreads(resourceIds, partitionConsumer); List calls = myLatch.awaitExpected(); PartitionCall partitionCall1 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 0); - assertThat(partitionCall1.threadName, isOneOf(EXPUNGE_THREADNAME_1, EXPUNGE_THREADNAME_2)); + assertThat(partitionCall1.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)); assertEquals(5, partitionCall1.size); PartitionCall partitionCall2 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 1); - assertThat(partitionCall2.threadName, isOneOf(EXPUNGE_THREADNAME_1, EXPUNGE_THREADNAME_2)); + assertThat(partitionCall2.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)); assertEquals(5, partitionCall2.size); assertNotEquals(partitionCall1.threadName, partitionCall2.threadName); } @@ -111,7 +109,6 @@ public class PartitionRunnerTest { @Test public void nineItemsBatch5() throws InterruptedException { Slice resourceIds = buildSlice(9); - myDaoConfig.setExpungeBatchSize(5); // We don't care in which order, but one partition size should be // 5 and one should be 4 @@ -119,13 +116,13 @@ public class PartitionRunnerTest { Consumer> partitionConsumer = buildPartitionConsumer(myLatch); myLatch.setExpectedCount(2); - myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer); + getPartitionRunner(5).runInPartitionedThreads(resourceIds, partitionConsumer); List calls = myLatch.awaitExpected(); PartitionCall 
partitionCall1 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 0); - assertThat(partitionCall1.threadName, isOneOf(EXPUNGE_THREADNAME_1, EXPUNGE_THREADNAME_2)); + assertThat(partitionCall1.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)); assertEquals(true, nums.remove(partitionCall1.size)); PartitionCall partitionCall2 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 1); - assertThat(partitionCall2.threadName, isOneOf(EXPUNGE_THREADNAME_1, EXPUNGE_THREADNAME_2)); + assertThat(partitionCall2.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)); assertEquals(true, nums.remove(partitionCall2.size)); assertNotEquals(partitionCall1.threadName, partitionCall2.threadName); } @@ -133,21 +130,19 @@ public class PartitionRunnerTest { @Test public void tenItemsOneThread() throws InterruptedException { Slice resourceIds = buildSlice(10); - myDaoConfig.setExpungeBatchSize(5); - myDaoConfig.setExpungeThreadCount(1); Consumer> partitionConsumer = buildPartitionConsumer(myLatch); myLatch.setExpectedCount(2); - myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer); + getPartitionRunner(5, 1).runInPartitionedThreads(resourceIds, partitionConsumer); List calls = myLatch.awaitExpected(); { PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 0); - assertEquals(EXPUNGE_THREADNAME_1, partitionCall.threadName); + assertEquals(TEST_THREADNAME_1, partitionCall.threadName); assertEquals(5, partitionCall.size); } { PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 1); - assertEquals(EXPUNGE_THREADNAME_1, partitionCall.threadName); + assertEquals(TEST_THREADNAME_1, partitionCall.threadName); assertEquals(5, partitionCall.size); } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java index 4cd403e2b2b..66201cfddba 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java @@ -675,7 +675,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { Throwable t = next.get(); if (t != null) { String stackTrace = ExceptionUtils.getStackTrace(t); - fail(t.toString() + "\n" + stackTrace); + fail(t + "\n" + stackTrace); } } executor.shutdownNow(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterUtil.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterUtil.java deleted file mode 100644 index 3d6b21b98e8..00000000000 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterUtil.java +++ /dev/null @@ -1,23 +0,0 @@ -package ca.uhn.fhir.jpa.delete.job; - -import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import com.github.jsonldjava.shaded.com.google.common.collect.Lists; -import org.springframework.batch.core.JobParameters; - -import javax.annotation.Nonnull; -import java.util.ArrayList; -import java.util.List; - -public final class DeleteExpungeJobParameterUtil { - private DeleteExpungeJobParameterUtil() { - } - - @Nonnull - public static JobParameters buildJobParameters(String... 
theUrls) { - List requestPartitionIds = new ArrayList<>(); - for (int i = 0; i < theUrls.length; ++i) { - requestPartitionIds.add(RequestPartitionId.defaultPartition()); - } - return DeleteExpungeJobConfig.buildJobParameters(2401, Lists.newArrayList(theUrls), requestPartitionIds); - } -} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobTest.java index d39498052a7..442102c0973 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobTest.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.delete.job; import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; +import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil; import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.test.utilities.BatchJobHelper; @@ -50,7 +51,7 @@ public class DeleteExpungeJobTest extends BaseJpaR4Test { assertEquals(2, myPatientDao.search(SearchParameterMap.newSynchronous()).size()); assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size()); - JobParameters jobParameters = DeleteExpungeJobParameterUtil.buildJobParameters("Observation?subject.active=false", "Patient?active=false"); + JobParameters jobParameters = MultiUrlJobParameterUtil.buildJobParameters("Observation?subject.active=false", "Patient?active=false"); // execute JobExecution jobExecution = myBatchJobSubmitter.runJob(myDeleteExpungeJob, jobParameters); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterValidatorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/MultiUrlJobParameterValidatorTest.java similarity index 66% rename from 
hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterValidatorTest.java rename to hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/MultiUrlJobParameterValidatorTest.java index d0e5992f8ae..b502cb4d08d 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterValidatorTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/MultiUrlJobParameterValidatorTest.java @@ -1,7 +1,10 @@ package ca.uhn.fhir.jpa.delete.job; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil; +import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.ResourceSearch; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -16,13 +19,14 @@ import org.springframework.batch.core.JobParametersInvalidException; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @ExtendWith(MockitoExtension.class) -class DeleteExpungeJobParameterValidatorTest { +class MultiUrlJobParameterValidatorTest { static final FhirContext ourFhirContext = FhirContext.forR4Cached(); @Mock @@ -30,32 +34,32 @@ class DeleteExpungeJobParameterValidatorTest { @Mock DaoRegistry myDaoRegistry; - DeleteExpungeJobParameterValidator mySvc; + MultiUrlJobParameterValidator mySvc; @BeforeEach public void initMocks() { - mySvc = new DeleteExpungeJobParameterValidator(myMatchUrlService, myDaoRegistry); + mySvc = new MultiUrlJobParameterValidator(myMatchUrlService, myDaoRegistry); } @Test public 
void testValidate() throws JobParametersInvalidException, JsonProcessingException { // setup - JobParameters parameters = DeleteExpungeJobParameterUtil.buildJobParameters("Patient?address=memory", "Patient?name=smith"); - ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap()); - when(myMatchUrlService.getResourceSearch(anyString())).thenReturn(resourceSearch); + JobParameters parameters = MultiUrlJobParameterUtil.buildJobParameters("Patient?address=memory", "Patient?name=smith"); + ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap(), RequestPartitionId.defaultPartition()); + when(myMatchUrlService.getResourceSearch(anyString(), any())).thenReturn(resourceSearch); when(myDaoRegistry.isResourceTypeSupported("Patient")).thenReturn(true); // execute mySvc.validate(parameters); // verify - verify(myMatchUrlService, times(2)).getResourceSearch(anyString()); + verify(myMatchUrlService, times(2)).getResourceSearch(anyString(), any()); } @Test public void testValidateBadType() throws JobParametersInvalidException, JsonProcessingException { - JobParameters parameters = DeleteExpungeJobParameterUtil.buildJobParameters("Patient?address=memory"); - ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap()); - when(myMatchUrlService.getResourceSearch(anyString())).thenReturn(resourceSearch); + JobParameters parameters = MultiUrlJobParameterUtil.buildJobParameters("Patient?address=memory"); + ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap(), RequestPartitionId.defaultPartition()); + when(myMatchUrlService.getResourceSearch(anyString(), any())).thenReturn(resourceSearch); when(myDaoRegistry.isResourceTypeSupported("Patient")).thenReturn(false); try { diff --git 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java new file mode 100644 index 00000000000..6c1b52518bc --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java @@ -0,0 +1,121 @@ +package ca.uhn.fhir.jpa.delete.job; + +import ca.uhn.fhir.jpa.batch.BatchJobsConfig; +import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; +import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil; +import ca.uhn.fhir.jpa.batch.job.MultiUrlProcessorJobConfig; +import ca.uhn.fhir.jpa.batch.reader.CronologicalBatchAllResourcePidReader; +import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.test.utilities.BatchJobHelper; +import org.apache.commons.lang3.time.DateUtils; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r4.model.Observation; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameter; +import org.springframework.batch.core.JobParameters; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; + +import javax.annotation.PostConstruct; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class ReindexJobTest extends BaseJpaR4Test { + private static final Logger ourLog = LoggerFactory.getLogger(ReindexJobTest.class); + + @Autowired + private IBatchJobSubmitter myBatchJobSubmitter; + @Autowired + @Qualifier(BatchJobsConfig.REINDEX_JOB_NAME) + private Job 
myReindexJob; + @Autowired + @Qualifier(BatchJobsConfig.REINDEX_EVERYTHING_JOB_NAME) + private Job myReindexEverythingJob; + @Autowired + private BatchJobHelper myBatchJobHelper; + + private ReindexTestHelper myReindexTestHelper; + + @PostConstruct + public void postConstruct() { + myReindexTestHelper = new ReindexTestHelper(myFhirCtx, myDaoRegistry, mySearchParamRegistry); + } + + @Test + public void testReindexJob() throws Exception { + // setup + + IIdType obsFinalId = myReindexTestHelper.createObservationWithAlleleExtension(Observation.ObservationStatus.FINAL); + IIdType obsCancelledId = myReindexTestHelper.createObservationWithAlleleExtension(Observation.ObservationStatus.CANCELLED); + + myReindexTestHelper.createAlleleSearchParameter(); + + assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size()); + // The searchparam value is on the observation, but it hasn't been indexed yet + assertThat(myReindexTestHelper.getAlleleObservationIds(), hasSize(0)); + + // Only reindex one of them + JobParameters jobParameters = MultiUrlJobParameterUtil.buildJobParameters("Observation?status=final"); + + // execute + JobExecution jobExecution = myBatchJobSubmitter.runJob(myReindexJob, jobParameters); + + myBatchJobHelper.awaitJobCompletion(jobExecution); + + // validate + assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size()); + // Now one of them should be indexed + List alleleObservationIds = myReindexTestHelper.getAlleleObservationIds(); + assertThat(alleleObservationIds, hasSize(1)); + assertEquals(obsFinalId.getIdPart(), alleleObservationIds.get(0)); + } + + @Test + public void testReindexEverythingJob() throws Exception { + // setup + + for (int i = 0; i < 50; ++i) { + myReindexTestHelper.createObservationWithAlleleExtension(Observation.ObservationStatus.FINAL); + } + + myReindexTestHelper.createAlleleSearchParameter(); + mySearchParamRegistry.forceRefresh(); + + assertEquals(50, 
myObservationDao.search(SearchParameterMap.newSynchronous()).size()); + // The searchparam value is on the observation, but it hasn't been indexed yet + assertThat(myReindexTestHelper.getAlleleObservationIds(), hasSize(0)); + + JobParameters jobParameters = buildEverythingJobParameters(3L); + + // execute + JobExecution jobExecution = myBatchJobSubmitter.runJob(myReindexEverythingJob, jobParameters); + + myBatchJobHelper.awaitJobCompletion(jobExecution); + + // validate + assertEquals(50, myObservationDao.search(SearchParameterMap.newSynchronous()).size()); + // Now all of them should be indexed + assertThat(myReindexTestHelper.getAlleleObservationIds(), hasSize(50)); + } + + private JobParameters buildEverythingJobParameters(Long theBatchSize) { + Map map = new HashMap<>(); + map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MultiUrlProcessorJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM))); + map.put(CronologicalBatchAllResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue())); + JobParameters parameters = new JobParameters(map); + return parameters; + } + + +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexTestHelper.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexTestHelper.java new file mode 100644 index 00000000000..785e0d4e588 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexTestHelper.java @@ -0,0 +1,105 @@ +package ca.uhn.fhir.jpa.delete.job; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.api.dao.DaoRegistry; +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.rest.api.CacheControlDirective; +import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.client.api.IGenericClient; +import ca.uhn.fhir.rest.gclient.StringClientParam; +import 
ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; +import ca.uhn.fhir.util.BundleUtil; +import org.hl7.fhir.instance.model.api.IBaseBundle; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r4.model.Enumerations; +import org.hl7.fhir.r4.model.Observation; +import org.hl7.fhir.r4.model.SearchParameter; +import org.hl7.fhir.r4.model.StringType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nonnull; +import java.util.List; + +public class ReindexTestHelper { + public static final String ALLELE_EXTENSION_URL = "http://hl7.org/fhir/StructureDefinition/observation-geneticsAlleleName"; + public static final String ALLELE_SP_CODE = "alleleName"; + private static final Logger ourLog = LoggerFactory.getLogger(ReindexTestHelper.class); + private static final String TEST_ALLELE_VALUE = "HERC"; + + private final FhirContext myFhirContext; + private final DaoRegistry myDaoRegistry; + private final ISearchParamRegistry mySearchParamRegistry; + private final IFhirResourceDao mySearchParameterDao; + private final IFhirResourceDao myObservationDao; + + public ReindexTestHelper(FhirContext theFhirContext, DaoRegistry theDaoRegistry, ISearchParamRegistry theSearchParamRegistry) { + myFhirContext = theFhirContext; + myDaoRegistry = theDaoRegistry; + mySearchParamRegistry = theSearchParamRegistry; + mySearchParameterDao = myDaoRegistry.getResourceDao(SearchParameter.class); + myObservationDao = myDaoRegistry.getResourceDao(Observation.class); + } + + public void createAlleleSearchParameter() { + createAlleleSearchParameter(ALLELE_SP_CODE); + } + + public void createAlleleSearchParameter(String theCode) { + SearchParameter alleleName = new SearchParameter(); + alleleName.setId("SearchParameter/alleleName"); + alleleName.setStatus(Enumerations.PublicationStatus.ACTIVE); + alleleName.addBase("Observation"); + 
alleleName.setCode(theCode); + alleleName.setType(Enumerations.SearchParamType.TOKEN); + alleleName.setTitle("AlleleName"); + alleleName.setExpression("Observation.extension('" + ALLELE_EXTENSION_URL + "')"); + alleleName.setXpathUsage(SearchParameter.XPathUsageType.NORMAL); + mySearchParameterDao.create(alleleName); + mySearchParamRegistry.forceRefresh(); + } + + public IIdType createObservationWithAlleleExtension(Observation.ObservationStatus theStatus) { + Observation observation = buildObservationWithAlleleExtension(theStatus); + return myObservationDao.create(observation).getId(); + } + + @Nonnull + public Observation buildObservationWithAlleleExtension(Observation.ObservationStatus theStatus) { + Observation observation = new Observation(); + observation.addExtension(ALLELE_EXTENSION_URL, new StringType(TEST_ALLELE_VALUE)); + observation.setStatus(theStatus); + return observation; + } + + public List getAlleleObservationIds() { + return getAlleleObservationIds(ALLELE_SP_CODE, null); + } + + public List getAlleleObservationIds(String theCode, String theIdentifier) { + SearchParameterMap map = SearchParameterMap.newSynchronous(); + map.add(theCode, new TokenParam(TEST_ALLELE_VALUE)); + if (theIdentifier != null) { + map.add(Observation.SP_IDENTIFIER, new TokenParam(theIdentifier)); + } + ourLog.info("Searching with url {}", map.toNormalizedQueryString(myFhirContext)); + IBundleProvider result = myObservationDao.search(map); + return result.getAllResourceIds(); + } + + public IBaseResource buildObservationWithAlleleExtension() { + return buildObservationWithAlleleExtension(Observation.ObservationStatus.FINAL); + } + + public List getAlleleObservationIds(IGenericClient theClient) { + IBaseBundle result = theClient.search() + .forResource("Observation") + .where(new StringClientParam(ALLELE_SP_CODE).matches().value(TEST_ALLELE_VALUE)) + .cacheControl(new CacheControlDirective().setNoCache(true)) + .execute(); + return BundleUtil.toListOfResourceIds(myFhirContext, 
result); + } +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java index faded1b6479..0f28baac6cf 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java @@ -17,7 +17,6 @@ import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; import com.google.common.collect.ListMultimap; import com.google.common.collect.Multimap; -import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.Encounter; import org.hl7.fhir.r4.model.Enumerations; @@ -32,7 +31,6 @@ import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.stream.Collectors; @@ -347,7 +345,7 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest { Multimap resourcesByType = runInTransaction(() -> { logAllResources(); - return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t->t.getResourceType(), t->t.getPartitionId().getPartitionId())); + return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t -> t.getResourceType(), t -> t.getPartitionId().getPartitionId())); }); assertThat(resourcesByType.get("Patient"), contains(4267)); @@ -382,7 +380,7 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest { Multimap resourcesByType = runInTransaction(() -> { logAllResources(); - return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t->t.getResourceType(), 
t->t.getPartitionId().getPartitionId())); + return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t -> t.getResourceType(), t -> t.getPartitionId().getPartitionId())); }); assertThat(resourcesByType.get("Patient"), contains(4267)); @@ -430,7 +428,7 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest { Multimap resourcesByType = runInTransaction(() -> { logAllResources(); - return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t->t.getResourceType(), t->t.getPartitionId().getPartitionId())); + return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t -> t.getResourceType(), t -> t.getPartitionId().getPartitionId())); }); assertThat(resourcesByType.get("Patient"), contains(4267)); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java index 47def724852..72217fb569b 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java @@ -24,6 +24,7 @@ import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.rest.server.interceptor.CorsInterceptor; import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor; import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider; +import ca.uhn.fhir.rest.server.provider.ReindexProvider; import ca.uhn.fhir.test.utilities.JettyUtil; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; @@ -77,6 +78,8 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { protected IPartitionDao myPartitionDao; @Autowired private DeleteExpungeProvider myDeleteExpungeProvider; + @Autowired + private ReindexProvider myReindexProvider; 
ResourceCountCache myResourceCountsCache; private TerminologyUploaderProvider myTerminologyUploaderProvider; @@ -109,7 +112,7 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { myTerminologyUploaderProvider = myAppCtx.getBean(TerminologyUploaderProvider.class); myDaoRegistry = myAppCtx.getBean(DaoRegistry.class); - ourRestServer.registerProviders(mySystemProvider, myTerminologyUploaderProvider, myDeleteExpungeProvider); + ourRestServer.registerProviders(mySystemProvider, myTerminologyUploaderProvider, myDeleteExpungeProvider, myReindexProvider); ourRestServer.registerProvider(myAppCtx.getBean(GraphQLProvider.class)); ourRestServer.registerProvider(myAppCtx.getBean(DiffProvider.class)); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantBatchOperationR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantBatchOperationR4Test.java new file mode 100644 index 00000000000..994ad38f17f --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantBatchOperationR4Test.java @@ -0,0 +1,230 @@ +package ca.uhn.fhir.jpa.provider.r4; + +import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.interceptor.api.HookParams; +import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; +import ca.uhn.fhir.interceptor.api.IPointcut; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.jpa.api.config.DaoConfig; +import ca.uhn.fhir.jpa.batch.BatchJobsConfig; +import ca.uhn.fhir.jpa.delete.job.ReindexTestHelper; +import ca.uhn.fhir.rest.api.CacheControlDirective; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.provider.ProviderConstants; +import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; +import ca.uhn.fhir.test.utilities.BatchJobHelper; +import org.hl7.fhir.instance.model.api.IIdType; +import 
org.hl7.fhir.r4.model.BooleanType; +import org.hl7.fhir.r4.model.Bundle; +import org.hl7.fhir.r4.model.DecimalType; +import org.hl7.fhir.r4.model.Observation; +import org.hl7.fhir.r4.model.Parameters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.List; + +import static ca.uhn.fhir.jpa.model.util.JpaConstants.DEFAULT_PARTITION_NAME; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.isA; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class MultitenantBatchOperationR4Test extends BaseMultitenantResourceProviderR4Test { + private static final Logger ourLog = LoggerFactory.getLogger(MultitenantBatchOperationR4Test.class); + + @Autowired + private BatchJobHelper myBatchJobHelper; + + @BeforeEach + @Override + public void before() throws Exception { + super.before(); + myDaoConfig.setAllowMultipleDelete(true); + myDaoConfig.setExpungeEnabled(true); + myDaoConfig.setDeleteExpungeEnabled(true); + } + + @AfterEach + @Override + public void after() throws Exception { + myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete()); + myDaoConfig.setExpungeEnabled(new DaoConfig().isExpungeEnabled()); + myDaoConfig.setDeleteExpungeEnabled(new DaoConfig().isDeleteExpungeEnabled()); + super.after(); + } + + @Test + public void testDeleteExpungeOperation() { + // Create patients + + IIdType idAT = createPatient(withTenant(TENANT_A), withActiveTrue()); + IIdType idAF = createPatient(withTenant(TENANT_A), withActiveFalse()); + IIdType idBT = createPatient(withTenant(TENANT_B), withActiveTrue()); + IIdType idBF = createPatient(withTenant(TENANT_B), withActiveFalse()); + + // validate setup + assertEquals(2, 
getAllPatientsInTenant(TENANT_A).getTotal()); + assertEquals(2, getAllPatientsInTenant(TENANT_B).getTotal()); + assertEquals(0, getAllPatientsInTenant(DEFAULT_PARTITION_NAME).getTotal()); + + Parameters input = new Parameters(); + input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "/Patient?active=false"); + + MyInterceptor interceptor = new MyInterceptor(); + myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PARTITION_SELECTED, interceptor); + // execute + + myTenantClientInterceptor.setTenantId(TENANT_B); + Parameters response = myClient + .operation() + .onServer() + .named(ProviderConstants.OPERATION_DELETE_EXPUNGE) + .withParameters(input) + .execute(); + + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response)); + myBatchJobHelper.awaitAllBulkJobCompletions(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME); + assertThat(interceptor.requestPartitionIds, hasSize(1)); + RequestPartitionId partitionId = interceptor.requestPartitionIds.get(0); + assertEquals(TENANT_B_ID, partitionId.getFirstPartitionIdOrNull()); + assertEquals(TENANT_B, partitionId.getFirstPartitionNameOrNull()); + assertThat(interceptor.requestDetails.get(0), isA(ServletRequestDetails.class)); + assertEquals("Patient", interceptor.resourceDefs.get(0).getName()); + myInterceptorRegistry.unregisterInterceptor(interceptor); + + DecimalType jobIdPrimitive = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID); + Long jobId = jobIdPrimitive.getValue().longValue(); + + assertEquals(1, myBatchJobHelper.getReadCount(jobId)); + assertEquals(1, myBatchJobHelper.getWriteCount(jobId)); + + // validate only the false patient in TENANT_B is removed + assertEquals(2, getAllPatientsInTenant(TENANT_A).getTotal()); + assertEquals(1, getAllPatientsInTenant(TENANT_B).getTotal()); + assertEquals(0, getAllPatientsInTenant(DEFAULT_PARTITION_NAME).getTotal()); + + } + + @Test + public void testReindexEverything() { + 
ReindexTestHelper reindexTestHelper = new ReindexTestHelper(myFhirCtx, myDaoRegistry, mySearchParamRegistry); + myTenantClientInterceptor.setTenantId(TENANT_A); + IIdType obsFinalA = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension()); + + myTenantClientInterceptor.setTenantId(TENANT_B); + IIdType obsFinalB = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension()); + + reindexTestHelper.createAlleleSearchParameter(); + + // The searchparam value is on the observation, but it hasn't been indexed yet + myTenantClientInterceptor.setTenantId(TENANT_A); + assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0)); + myTenantClientInterceptor.setTenantId(TENANT_B); + assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0)); + // setup + Parameters input = new Parameters(); + Integer batchSize = 2401; + input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_BATCH_SIZE, new DecimalType(batchSize)); + input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_EVERYTHING, new BooleanType(true)); + + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); + + // reindex all of Tenant A + myTenantClientInterceptor.setTenantId(TENANT_A); + Parameters response = myClient + .operation() + .onServer() + .named(ProviderConstants.OPERATION_REINDEX) + .withParameters(input) + .execute(); + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response)); + DecimalType jobId = (DecimalType) response.getParameter(ProviderConstants.OPERATION_REINDEX_RESPONSE_JOB_ID); + + myBatchJobHelper.awaitJobExecution(jobId.getValueAsNumber().longValue()); + + // validate + List alleleObservationIds = reindexTestHelper.getAlleleObservationIds(myClient); + // Only the one in the first tenant should be indexed + myTenantClientInterceptor.setTenantId(TENANT_A); + assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(1)); + 
assertEquals(obsFinalA.getIdPart(), alleleObservationIds.get(0)); + myTenantClientInterceptor.setTenantId(TENANT_B); + assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0)); + } + + @Test + public void testReindexByUrl() { + ReindexTestHelper reindexTestHelper = new ReindexTestHelper(myFhirCtx, myDaoRegistry, mySearchParamRegistry); + myTenantClientInterceptor.setTenantId(TENANT_A); + IIdType obsFinalA = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension(Observation.ObservationStatus.FINAL)); + IIdType obsCancelledA = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension(Observation.ObservationStatus.CANCELLED)); + + myTenantClientInterceptor.setTenantId(TENANT_B); + IIdType obsFinalB = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension(Observation.ObservationStatus.FINAL)); + IIdType obsCancelledB = doCreateResource(reindexTestHelper.buildObservationWithAlleleExtension(Observation.ObservationStatus.CANCELLED)); + + reindexTestHelper.createAlleleSearchParameter(); + + // The searchparam value is on the observation, but it hasn't been indexed yet + myTenantClientInterceptor.setTenantId(TENANT_A); + assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0)); + myTenantClientInterceptor.setTenantId(TENANT_B); + assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0)); + + // setup + Parameters input = new Parameters(); + Integer batchSize = 2401; + input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_BATCH_SIZE, new DecimalType(batchSize)); + input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_URL, "Observation?status=final"); + + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); + + // Reindex Tenant A by query url + myTenantClientInterceptor.setTenantId(TENANT_A); + Parameters response = myClient + .operation() + .onServer() + .named(ProviderConstants.OPERATION_REINDEX) + .withParameters(input) + 
.execute(); + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response)); + DecimalType jobId = (DecimalType) response.getParameter(ProviderConstants.OPERATION_REINDEX_RESPONSE_JOB_ID); + + myBatchJobHelper.awaitJobExecution(jobId.getValueAsNumber().longValue()); + + // validate + List alleleObservationIds = reindexTestHelper.getAlleleObservationIds(myClient); + // Only the one in the first tenant should be indexed + myTenantClientInterceptor.setTenantId(TENANT_A); + assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(1)); + assertEquals(obsFinalA.getIdPart(), alleleObservationIds.get(0)); + myTenantClientInterceptor.setTenantId(TENANT_B); + assertThat(reindexTestHelper.getAlleleObservationIds(myClient), hasSize(0)); + } + + private Bundle getAllPatientsInTenant(String theTenantId) { + myTenantClientInterceptor.setTenantId(theTenantId); + + return myClient.search().forResource("Patient").cacheControl(new CacheControlDirective().setNoCache(true)).returnBundle(Bundle.class).execute(); + } + + private static class MyInterceptor implements IAnonymousInterceptor { + public List requestPartitionIds = new ArrayList<>(); + public List requestDetails = new ArrayList<>(); + public List resourceDefs = new ArrayList<>(); + + @Override + public void invoke(IPointcut thePointcut, HookParams theArgs) { + requestPartitionIds.add(theArgs.get(RequestPartitionId.class)); + requestDetails.add(theArgs.get(RequestDetails.class)); + resourceDefs.add(theArgs.get(RuntimeResourceDefinition.class)); + } + } +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantDeleteExpungeR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantDeleteExpungeR4Test.java deleted file mode 100644 index e03395e7ede..00000000000 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantDeleteExpungeR4Test.java +++ /dev/null @@ -1,134 +0,0 @@ -package 
ca.uhn.fhir.jpa.provider.r4; - -import ca.uhn.fhir.context.RuntimeResourceDefinition; -import ca.uhn.fhir.interceptor.api.HookParams; -import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; -import ca.uhn.fhir.interceptor.api.IPointcut; -import ca.uhn.fhir.interceptor.api.Pointcut; -import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import ca.uhn.fhir.jpa.api.config.DaoConfig; -import ca.uhn.fhir.jpa.batch.BatchJobsConfig; -import ca.uhn.fhir.jpa.partition.SystemRequestDetails; -import ca.uhn.fhir.rest.api.CacheControlDirective; -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.server.provider.ProviderConstants; -import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; -import ca.uhn.fhir.test.utilities.BatchJobHelper; -import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.r4.model.Bundle; -import org.hl7.fhir.r4.model.DecimalType; -import org.hl7.fhir.r4.model.Parameters; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; - -import java.util.ArrayList; -import java.util.List; - -import static ca.uhn.fhir.jpa.model.util.JpaConstants.DEFAULT_PARTITION_NAME; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.isA; -import static org.junit.jupiter.api.Assertions.assertEquals; - -public class MultitenantDeleteExpungeR4Test extends BaseMultitenantResourceProviderR4Test { - private static final Logger ourLog = LoggerFactory.getLogger(MultitenantDeleteExpungeR4Test.class); - - @Autowired - private BatchJobHelper myBatchJobHelper; - - @BeforeEach - @Override - public void before() throws Exception { - super.before(); - myDaoConfig.setAllowMultipleDelete(true); - myDaoConfig.setExpungeEnabled(true); - myDaoConfig.setDeleteExpungeEnabled(true); - } 
- - @AfterEach - @Override - public void after() throws Exception { - myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete()); - myDaoConfig.setExpungeEnabled(new DaoConfig().isExpungeEnabled()); - myDaoConfig.setDeleteExpungeEnabled(new DaoConfig().isDeleteExpungeEnabled()); - super.after(); - } - - @Test - public void testDeleteExpungeOperation() { - // Create patients - - IIdType idAT = createPatient(withTenant(TENANT_A), withActiveTrue()); - IIdType idAF = createPatient(withTenant(TENANT_A), withActiveFalse()); - IIdType idBT = createPatient(withTenant(TENANT_B), withActiveTrue()); - IIdType idBF = createPatient(withTenant(TENANT_B), withActiveFalse()); - - // validate setup - assertEquals(2, getAllPatientsInTenant(TENANT_A).getTotal()); - assertEquals(2, getAllPatientsInTenant(TENANT_B).getTotal()); - assertEquals(0, getAllPatientsInTenant(DEFAULT_PARTITION_NAME).getTotal()); - - Parameters input = new Parameters(); - input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, "/Patient?active=false"); - - MyInterceptor interceptor = new MyInterceptor(); - myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PARTITION_SELECTED, interceptor); - // execute - - myTenantClientInterceptor.setTenantId(TENANT_B); - Parameters response = myClient - .operation() - .onServer() - .named(ProviderConstants.OPERATION_DELETE_EXPUNGE) - .withParameters(input) - .execute(); - - ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response)); - myBatchJobHelper.awaitAllBulkJobCompletions(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME); - assertThat(interceptor.requestPartitionIds, hasSize(3)); - interceptor.requestPartitionIds.forEach(id -> assertEquals(TENANT_B_ID, id.getFirstPartitionIdOrNull())); - interceptor.requestPartitionIds.forEach(id -> assertEquals(TENANT_B, id.getFirstPartitionNameOrNull())); - assertThat(interceptor.requestDetails.get(0), isA(ServletRequestDetails.class)); - 
assertThat(interceptor.requestDetails.get(1), isA(SystemRequestDetails.class)); - assertThat(interceptor.requestDetails.get(2), isA(SystemRequestDetails.class)); - assertEquals("Patient", interceptor.resourceDefs.get(0).getName()); - assertEquals("Patient", interceptor.resourceDefs.get(1).getName()); - assertEquals("Patient", interceptor.resourceDefs.get(2).getName()); - myInterceptorRegistry.unregisterInterceptor(interceptor); - - DecimalType jobIdPrimitive = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID); - Long jobId = jobIdPrimitive.getValue().longValue(); - - assertEquals(1, myBatchJobHelper.getReadCount(jobId)); - assertEquals(1, myBatchJobHelper.getWriteCount(jobId)); - - // validate only the false patient in TENANT_B is removed - assertEquals(2, getAllPatientsInTenant(TENANT_A).getTotal()); - assertEquals(1, getAllPatientsInTenant(TENANT_B).getTotal()); - assertEquals(0, getAllPatientsInTenant(DEFAULT_PARTITION_NAME).getTotal()); - - } - - private Bundle getAllPatientsInTenant(String theTenantId) { - myTenantClientInterceptor.setTenantId(theTenantId); - - return myClient.search().forResource("Patient").cacheControl(new CacheControlDirective().setNoCache(true)).returnBundle(Bundle.class).execute(); - } - - private static class MyInterceptor implements IAnonymousInterceptor { - public List requestPartitionIds = new ArrayList<>(); - public List requestDetails = new ArrayList<>(); - public List resourceDefs = new ArrayList<>(); - - @Override - public void invoke(IPointcut thePointcut, HookParams theArgs) { - requestPartitionIds.add(theArgs.get(RequestPartitionId.class)); - requestDetails.add(theArgs.get(RequestDetails.class)); - resourceDefs.add(theArgs.get(RuntimeResourceDefinition.class)); - } - } -} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java index 66fc2d75e8c..c1277dd9378 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java @@ -13,17 +13,20 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; -import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import org.apache.commons.lang3.time.DateUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; +import org.mockito.InjectMocks; import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.SliceImpl; import org.springframework.transaction.PlatformTransactionManager; @@ -50,16 +53,15 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; - +@ExtendWith(MockitoExtension.class) public class ResourceReindexingSvcImplTest extends BaseJpaTest { - private static FhirContext ourCtx = FhirContext.forCached(FhirVersionEnum.R4); + private static final FhirContext ourCtx = FhirContext.forCached(FhirVersionEnum.R4); @Mock private PlatformTransactionManager myTxManager; - private ResourceReindexingSvcImpl mySvc; - private DaoConfig myDaoConfig; + 
private final DaoConfig myDaoConfig = new DaoConfig(); @Mock private DaoRegistry myDaoRegistry; @@ -88,6 +90,10 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest { private TransactionStatus myTxStatus; @Mock private ISchedulerService mySchedulerService; + @InjectMocks + private final ResourceReindexer myResourceReindexer = new ResourceReindexer(ourCtx); + @InjectMocks + private final ResourceReindexingSvcImpl mySvc = new ResourceReindexingSvcImpl(); @Override protected FhirContext getContext() { @@ -101,22 +107,12 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest { @BeforeEach public void before() { - myDaoConfig = new DaoConfig(); myDaoConfig.setReindexThreadCount(2); - mySvc = new ResourceReindexingSvcImpl(); mySvc.setContextForUnitTest(ourCtx); mySvc.setDaoConfigForUnitTest(myDaoConfig); - mySvc.setDaoRegistryForUnitTest(myDaoRegistry); - mySvc.setForcedIdDaoForUnitTest(myForcedIdDao); - mySvc.setReindexJobDaoForUnitTest(myReindexJobDao); - mySvc.setResourceTableDaoForUnitTest(myResourceTableDao); - mySvc.setTxManagerForUnitTest(myTxManager); - mySvc.setSearchParamRegistryForUnitTest(mySearchParamRegistry); - mySvc.setSchedulerServiceForUnitTest(mySchedulerService); + mySvc.setResourceReindexerForUnitTest(myResourceReindexer); mySvc.start(); - - when(myTxManager.getTransaction(any())).thenReturn(myTxStatus); } @Test @@ -157,7 +153,6 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest { public void testMarkAsDeletedIfNothingIndexed() { mockNothingToExpunge(); mockSingleReindexingJob(null); - mockFetchFourResources(); // Mock resource fetch List values = Collections.emptyList(); when(myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(any(), any(), any())).thenReturn(new SliceImpl<>(values)); @@ -197,6 +192,8 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest { mockSingleReindexingJob(null); mockFourResourcesNeedReindexing(); mockFetchFourResources(); + 
when(myDaoRegistry.getResourceDao(eq("Patient"))).thenReturn(myResourceDao); + when(myDaoRegistry.getResourceDao(eq(Patient.class))).thenReturn(myResourceDao); int count = mySvc.forceReindexingPass(); assertEquals(4, count); @@ -248,8 +245,6 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest { mockWhenResourceTableFindById(updatedTimes, resourceTypes); when(myDaoRegistry.getResourceDao(eq("Patient"))).thenReturn(myResourceDao); when(myDaoRegistry.getResourceDao(eq(Patient.class))).thenReturn(myResourceDao); - when(myDaoRegistry.getResourceDao(eq("Observation"))).thenReturn(myResourceDao); - when(myDaoRegistry.getResourceDao(eq(Observation.class))).thenReturn(myResourceDao); when(myResourceDao.readByPid(any(), anyBoolean())).thenAnswer(t->{ int idx = t.getArgument(0, ResourcePersistentId.class).getIdAsLong().intValue(); return resources.get(idx); @@ -277,6 +272,8 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest { @Test public void testReindexDeletedResource() { + // setup + when(myTxManager.getTransaction(any())).thenReturn(myTxStatus); mockNothingToExpunge(); mockSingleReindexingJob("Patient"); // Mock resource fetch @@ -294,15 +291,13 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest { ); mockWhenResourceTableFindById(updatedTimes, resourceTypes); when(myDaoRegistry.getResourceDao(eq("Patient"))).thenReturn(myResourceDao); - when(myDaoRegistry.getResourceDao(eq(Patient.class))).thenReturn(myResourceDao); - when(myDaoRegistry.getResourceDao(eq("Observation"))).thenReturn(myResourceDao); - when(myDaoRegistry.getResourceDao(eq(Observation.class))).thenReturn(myResourceDao); when(myResourceDao.readByPid(any(), anyBoolean())).thenReturn(null); - + // execute int count = mySvc.forceReindexingPass(); - assertEquals(0, count); + // verify + assertEquals(0, count); verify(myResourceTableDao, times(1)).updateIndexStatus(eq(0L), eq(BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED)); } @@ -356,8 +351,6 @@ public class 
ResourceReindexingSvcImplTest extends BaseJpaTest { new Observation().setId("Observation/3/_history/1") ); mockWhenResourceTableFindById(updatedTimes, resourceTypes); - when(myDaoRegistry.getResourceDao(eq("Patient"))).thenReturn(myResourceDao); - when(myDaoRegistry.getResourceDao(eq(Patient.class))).thenReturn(myResourceDao); when(myDaoRegistry.getResourceDao(eq("Observation"))).thenReturn(myResourceDao); when(myDaoRegistry.getResourceDao(eq(Observation.class))).thenReturn(myResourceDao); when(myResourceDao.readByPid(any(), anyBoolean())).thenAnswer(t->{ diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/MultimapCollector.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/MultimapCollector.java index 66707836bde..d7e3d83cbc0 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/MultimapCollector.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/MultimapCollector.java @@ -3,7 +3,6 @@ package ca.uhn.fhir.jpa.util; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ListMultimap; -import com.google.common.collect.Multimap; import java.util.Set; import java.util.function.BiConsumer; @@ -18,47 +17,47 @@ import java.util.stream.Collector; public class MultimapCollector implements Collector, ListMultimap> { - private final Function keyGetter; - private final Function valueGetter; + private final Function keyGetter; + private final Function valueGetter; - public MultimapCollector(Function keyGetter, Function valueGetter) { - this.keyGetter = keyGetter; - this.valueGetter = valueGetter; - } + public MultimapCollector(Function keyGetter, Function valueGetter) { + this.keyGetter = keyGetter; + this.valueGetter = valueGetter; + } - public static MultimapCollector toMultimap(Function keyGetter, Function valueGetter) { - return new MultimapCollector<>(keyGetter, valueGetter); - } + public static MultimapCollector 
toMultimap(Function keyGetter, Function valueGetter) { + return new MultimapCollector<>(keyGetter, valueGetter); + } - public static MultimapCollector toMultimap(Function keyGetter) { - return new MultimapCollector<>(keyGetter, v -> v); - } + public static MultimapCollector toMultimap(Function keyGetter) { + return new MultimapCollector<>(keyGetter, v -> v); + } - @Override - public Supplier> supplier() { - return ArrayListMultimap::create; - } + @Override + public Supplier> supplier() { + return ArrayListMultimap::create; + } - @Override - public BiConsumer, T> accumulator() { - return (map, element) -> map.put(keyGetter.apply(element), valueGetter.apply(element)); - } + @Override + public BiConsumer, T> accumulator() { + return (map, element) -> map.put(keyGetter.apply(element), valueGetter.apply(element)); + } - @Override - public BinaryOperator> combiner() { - return (map1, map2) -> { - map1.putAll(map2); - return map1; - }; - } + @Override + public BinaryOperator> combiner() { + return (map1, map2) -> { + map1.putAll(map2); + return map1; + }; + } - @Override - public Function, ListMultimap> finisher() { - return map -> map; - } + @Override + public Function, ListMultimap> finisher() { + return map -> map; + } - @Override - public Set characteristics() { - return ImmutableSet.of(Characteristics.IDENTITY_FINISH); - } + @Override + public Set characteristics() { + return ImmutableSet.of(Characteristics.IDENTITY_FINISH); + } } diff --git a/hapi-fhir-jpaserver-base/src/test/resources/r4/load_bundle.json b/hapi-fhir-jpaserver-base/src/test/resources/r4/load_bundle.json index 101b34b853c..7c43c737d67 100644 --- a/hapi-fhir-jpaserver-base/src/test/resources/r4/load_bundle.json +++ b/hapi-fhir-jpaserver-base/src/test/resources/r4/load_bundle.json @@ -1,986 +1,986 @@ { - "resourceType": "Bundle", - "type": "transaction", - "entry": [ - { - "resource": { - "resourceType": "ExplanationOfBenefit", - "meta": { - "lastUpdated": "2021-06-30", - "profile": [ - 
"http://hl7.org/fhir/us/carin-bb/StructureDefinition/C4BB-ExplanationOfBenefit-Professional-NonClinician" - ] - }, - "identifier": [ - { - "type": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBIdentifierType", - "code": "payerid" - } - ] - }, - "system": "https://hl7.org/fhir/sid/payerid", - "value": "5824473976" - }, - { - "type": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBIdentifierType", - "code": "uc" - } - ] - }, - "system": "https://hl7.org/fhir/sid/claimid", - "value": "1234094" - } - ], - "status": "active", - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/claim-type", - "code": "professional" - } - ] - }, - "use": "claim", - "patient": { - "reference": "Patient/d1a47e2c-509b-e326-deab-597e3f598ca5" - }, - "billablePeriod": { - "start": "2017-01-08", - "end": "2017-01-08" - }, - "created": "2017-01-11T00:00:00-08:00", - "insurer": { - "reference": "Organization/5954a17b-0779-334c-4f1c-e894e45d15fb" - }, - "provider": { - "reference": "Organization/68ae4f74-afdc-6242-c50e-02ef776d8e5d" - }, - "payee": { - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/payeetype", - "code": "provider" - } - ], - "text": "Claim paid to VENDOR" - }, - "party": { - "reference": "Organization/68ae4f74-afdc-6242-c50e-02ef776d8e5d" - } - }, - "outcome": "complete", - "disposition": "PAID", - "careTeam": [ - { - "sequence": 1, - "provider": { - "reference": "Practitioner/23eccc61-ab67-bf8a-e464-6260f7989556" - }, - "responsible": false, - "role": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/claimcareteamrole", - "code": "primary" - } - ] - } - }, - { - "sequence": 2, - "provider": { - "reference": "Practitioner/dbbc9a06-f685-b481-d739-133755af138e" - }, - "responsible": false, - "role": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBClaimCareTeamRole", - "code": "referring" - } - ] - } - }, - 
{ - "sequence": 3, - "provider": { - "reference": "Practitioner/39b9250c-0d01-cbb0-ea89-0de9c74af511" - }, - "responsible": true, - "role": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBClaimCareTeamRole", - "code": "performing" - } - ] - } - } - ], - "supportingInfo": [ - { - "sequence": 1, - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBSupportingInfoType", - "code": "clmrecvddate" - } - ] - }, - "timingDate": "2017-01-11" - }, - { - "sequence": 2, - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBSupportingInfoType", - "code": "billingnetworkcontractingstatus" - } - ] - }, - "code": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBPayerAdjudicationStatus", - "code": "other" - } - ] - } - } - ], - "diagnosis": [ - { - "sequence": 4, - "diagnosisCodeableConcept": { - "coding": [ - { - "system": "http://hl7.org/fhir/sid/icd-10-cm", - "code": "I27.2", - "display": "Other secondary pulmonary hypertension" - } - ], - "text": "Other secondary pulmonary hypertension" - } - } - ], - "procedure": [ - { - "sequence": 1, - "date": "2017-01-08T00:00:00-08:00", - "procedureCodeableConcept": { - "coding": [ - { - "system": "http://www.ama-assn.org/go/cpt", - "code": "99233", - "display": "Subsequent hospital care for severe problem" - } - ], - "text": "SBSQ HOSPITAL CARE/DAY 35 MINUTES" - } - } - ], - "insurance": [ - { - "focal": true, - "coverage": { - "reference": "urn:uuid:175dbf4a-7ee2-446d-9938-82eea27871a7" - } - } - ], - "item": [ - { - "sequence": 1, - "diagnosisSequence": [ - 4 - ], - "procedureSequence": [ - 1 - ], - "productOrService": { - "coding": [ - { - "system": "http://www.ama-assn.org/go/cpt", - "code": "99233", - "display": "Subsequent hospital care for severe problem" - } - ], - "text": "SBSQ HOSPITAL CARE/DAY 35 MINUTES" - }, - "servicedPeriod": { - "start": "2017-01-08", - "end": "2017-01-08" - }, - 
"locationCodeableConcept": { - "coding": [ - { - "system": "https://www.cms.gov/Medicare/Coding/place-of-service-codes/Place_of_Service_Code_Set", - "code": "99" - } - ] - }, - "quantity": { - "value": 1, - "unit": "Units", - "system": "http://unitsofmeasure.org", - "code": "[arb'U]" - }, - "net": { - "value": 317.00, - "currency": "USD" - }, - "adjudication": [ - { - "category": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/adjudication", - "code": "submitted" - } - ] - }, - "amount": { - "value": 317.00, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/adjudication", - "code": "benefit" - } - ] - }, - "amount": { - "value": 124.69, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/adjudication", - "code": "copay" - } - ] - }, - "amount": { - "value": 0.00, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/adjudication", - "code": "deductible" - } - ] - }, - "amount": { - "value": 124.69, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", - "code": "coinsurance" - } - ] - }, - "amount": { - "value": 0.00, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", - "code": "memberliability" - } - ] - }, - "amount": { - "value": 124.69, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", - "code": "noncovered" - } - ] - }, - "amount": { - "value": 0.00, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", - "code": "priorpayerpaid" - } - ] - }, - "amount": { - "value": 0.00, - "currency": 
"USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", - "code": "paidtoprovider" - } - ] - }, - "amount": { - "value": 0.00, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBPayerAdjudicationStatus", - "code": "outofnetwork" - } - ] - }, - "amount": { - "value": 0.00, - "currency": "USD" - } - } - ] - } - ], - "total": [ - { - "category": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/adjudication", - "code": "submitted" - } - ] - }, - "amount": { - "value": 317.00, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/adjudication", - "code": "benefit" - } - ] - }, - "amount": { - "value": 124.69, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/adjudication", - "code": "copay" - } - ] - }, - "amount": { - "value": 0.00, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/adjudication", - "code": "deductible" - } - ] - }, - "amount": { - "value": 124.69, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", - "code": "coinsurance" - } - ] - }, - "amount": { - "value": 0.00, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", - "code": "memberliability" - } - ] - }, - "amount": { - "value": 124.69, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", - "code": "noncovered" - } - ] - }, - "amount": { - "value": 0.00, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": 
"http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", - "code": "priorpayerpaid" - } - ] - }, - "amount": { - "value": 0.00, - "currency": "USD" - } - }, - { - "category": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", - "code": "paidtoprovider" - } - ] - }, - "amount": { - "value": 0.00, - "currency": "USD" - } - } - ], - "payment": { - "date": "2017-02-02", - "amount": { - "value": 0.00, - "currency": "USD" - } - } - }, - "request": { - "method": "PUT", - "url": "ExplanationOfBenefit?identifier=5824473976" - } - }, - { - "resource": { - "resourceType": "Patient", - "id": "d1a47e2c-509b-e326-deab-597e3f598ca5", - "meta": { - "lastUpdated": "2021-06-30", - "profile": [ - "http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient" - ] - }, - "identifier": [ - { - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/v2-0203", - "code": "MR" - } - ] - }, - "system": "https://example.org/front-door", - "value": "412563524-CO" - } - ], - "name": [ - { - "use": "usual", - "text": "HYOHWAN MGUIRRE", - "family": "MGUIRRE", - "given": [ - "HYOHWAN" - ] - } - ], - "telecom": [ - { - "system": "phone", - "value": "719-654-0220", - "use": "home" - } - ], - "gender": "unknown", - "birthDate": "1958-05-12", - "address": [ - { - "use": "home", - "type": "postal", - "line": [ - "20360 East 45Th Court", - "PO Box 523" - ], - "city": "COLORADO SPRINGS", - "postalCode": "80922-4166" - } - ] - }, - "request": { - "method": "PUT", - "url": "Patient/d1a47e2c-509b-e326-deab-597e3f598ca5" - } - }, - { - "fullUrl": "urn:uuid:175dbf4a-7ee2-446d-9938-82eea27871a7", - "resource": { - "resourceType": "Coverage", - "meta": { - "lastUpdated": "2021-06-30", - "profile": [ - "http://hl7.org/fhir/us/carin-bb/StructureDefinition/C4BB-Coverage" - ] - }, - "identifier": [ - { - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/v2-0203", - "code": "FILL" - } - ] - }, - "system": 
"https://hl7.org/fhir/sid/coverageid", - "value": "412563524-CO-80001" - } - ], - "status": "active", - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/v3-ActCode", - "code": "HMO", - "display": "health maintenance organization policy" - } - ], - "text": "HMO - HMO COMMERCIAL-HDHP-Signature" - }, - "subscriberId": "412563524", - "beneficiary": { - "reference": "Patient/d1a47e2c-509b-e326-deab-597e3f598ca5" - }, - "relationship": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/subscriber-relationship", - "code": "self", - "display": "Self" - } - ], - "text": "The Beneficiary is the Subscriber" - }, - "period": { - "start": "2016-01-01", - "end": "2017-07-01" - }, - "payor": [ - { - "reference": "Organization/5954a17b-0779-334c-4f1c-e894e45d15fb" - } - ], - "class": [ - { - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/coverage-class", - "code": "group", - "display": "Group" - } - ], - "text": "An employee group" - }, - "value": "80001", - "name": "CS BRZ HDHP 5500/30%/0 ONX S-NON-MEDICARE" - } - ] - }, - "request": { - "method": "PUT", - "url": "Coverage?identifier=412563524-CO-80001" - } - }, - { - "resource": { - "resourceType": "Organization", - "id": "68ae4f74-afdc-6242-c50e-02ef776d8e5d", - "meta": { - "lastUpdated": "2021-06-30", - "profile": [ - "http://hl7.org/fhir/us/carin-bb/StructureDefinition/C4BB-Organization" - ] - }, - "identifier": [ - { - "type": { - "coding": [ - { - "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBIdentifierType", - "code": "npi" - } - ] - }, - "system": "http://hl7.org/fhir/sid/us-npi", - "value": "1407833767" - }, - { - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/v2-0203", - "code": "TAX" - } - ] - }, - "system": "urn:oid:2.16.840.1.113883.4.4" - } - ], - "active": true, - "type": [ - { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/organization-type", - "code": "prov" - } - ] - 
} - ], - "name": "PIKES PEAK NEPHROLOGY ASSOCIATES PC", - "address": [ - { - "use": "work", - "type": "physical", - "line": [ - "1914 LELARAY STREET" - ], - "city": "COLORADO SPRINGS", - "postalCode": "80909", - "country": "USA" - } - ] - }, - "request": { - "method": "PUT", - "url": "Organization/68ae4f74-afdc-6242-c50e-02ef776d8e5d" - } - }, - { - "resource": { - "resourceType": "Organization", - "id": "5954a17b-0779-334c-4f1c-e894e45d15fb", - "meta": { - "lastUpdated": "2021-06-30", - "profile": [ - "http://hl7.org/fhir/us/carin-bb/StructureDefinition/C4BB-Organization" - ] - }, - "identifier": [ - { - "use": "usual", - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/v2-0203", - "code": "FILL" - } - ] - }, - "system": "https://hl7.org/fhir/sid/organizationid", - "value": "NATLTAP CO-KFHP-PAY-CO" - } - ], - "active": true, - "type": [ - { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/organization-type", - "code": "pay", - "display": "Payer" - } - ] - } - ], - "name": "KAISER FOUNDATION HEALTHPLAN, INC", - "telecom": [ - { - "system": "phone", - "value": "1-800-382-4661", - "use": "work" - } - ], - "address": [ - { - "use": "work", - "type": "postal", - "line": [ - "NATIONAL CLAIMS ADMINISTRATION COLORADO", - "PO Box 629028" - ], - "city": "El Dorado Hills", - "state": "CA", - "postalCode": "95762-9028" - } - ] - }, - "request": { - "method": "PUT", - "url": "Organization/5954a17b-0779-334c-4f1c-e894e45d15fb" - } - }, - { - "resource": { - "resourceType": "Practitioner", - "id": "23eccc61-ab67-bf8a-e464-6260f7989556", - "meta": { - "lastUpdated": "2021-06-30", - "profile": [ - "http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner" - ] - }, - "identifier": [ - { - "use": "usual", - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/v2-0203", - "code": "NPI" - } - ] - }, - "system": "http://hl7.org/fhir/sid/us-npi", - "value": "1497983654" - } - ], - "name": [ - { - 
"use": "usual", - "text": "CASSIDY, HEATHER M (MD)", - "family": "CASSIDY", - "given": [ - "HEATHER" - ], - "suffix": [ - "MD" - ] - } - ], - "address": [ - { - "use": "work", - "line": [ - "Briargate", - "1405 Briargate Pkwy #141" - ], - "city": "Colorado Springs", - "postalCode": "80920" - } - ] - }, - "request": { - "method": "PUT", - "url": "Practitioner/23eccc61-ab67-bf8a-e464-6260f7989556" - } - }, - { - "resource": { - "resourceType": "Practitioner", - "id": "dbbc9a06-f685-b481-d739-133755af138e", - "meta": { - "lastUpdated": "2021-06-30", - "profile": [ - "http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner" - ] - }, - "identifier": [ - { - "use": "usual", - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/v2-0203", - "code": "NPI" - } - ] - }, - "system": "http://hl7.org/fhir/sid/us-npi", - "value": "1568467280" - }, - { - "use": "usual", - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/v2-0203", - "code": "TAX" - } - ] - }, - "system": "urn:oid:2.16.840.1.113883.4.4", - "value": "311669909" - } - ], - "name": [ - { - "use": "usual", - "text": "MOHNSSEN, STEVEN R (MD)", - "family": "MOHNSSEN", - "given": [ - "STEVEN" - ], - "suffix": [ - "MD" - ] - } - ], - "address": [ - { - "use": "work", - "line": [ - "1725 E Boulder St", - "Ste 204" - ], - "city": "Colorado Springs", - "postalCode": "80909" - } - ] - }, - "request": { - "method": "PUT", - "url": "Practitioner/dbbc9a06-f685-b481-d739-133755af138e" - } - }, - { - "resource": { - "resourceType": "Practitioner", - "id": "39b9250c-0d01-cbb0-ea89-0de9c74af511", - "meta": { - "lastUpdated": "2021-06-30", - "profile": [ - "http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner" - ] - }, - "identifier": [ - { - "use": "usual", - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/v2-0203", - "code": "NPI" - } - ] - }, - "system": "http://hl7.org/fhir/sid/us-npi", - "value": "1679605265" - }, - 
{ - "use": "usual", - "type": { - "coding": [ - { - "system": "http://terminology.hl7.org/CodeSystem/v2-0203", - "code": "TAX" - } - ] - }, - "system": "urn:oid:2.16.840.1.113883.4.4", - "value": "840629252" - } - ], - "name": [ - { - "use": "usual", - "text": "ROSS, MICHAEL D (MD)", - "family": "ROSS", - "given": [ - "MICHAEL" - ], - "suffix": [ - "MD" - ] - } - ], - "address": [ - { - "use": "work", - "line": [ - "1914 Lelaray St" - ], - "city": "Colorado Springs", - "postalCode": "80909" - } - ] - }, - "request": { - "method": "PUT", - "url": "Practitioner/39b9250c-0d01-cbb0-ea89-0de9c74af511" - } - } - ] + "resourceType": "Bundle", + "type": "transaction", + "entry": [ + { + "resource": { + "resourceType": "ExplanationOfBenefit", + "meta": { + "lastUpdated": "2021-06-30", + "profile": [ + "http://hl7.org/fhir/us/carin-bb/StructureDefinition/C4BB-ExplanationOfBenefit-Professional-NonClinician" + ] + }, + "identifier": [ + { + "type": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBIdentifierType", + "code": "payerid" + } + ] + }, + "system": "https://hl7.org/fhir/sid/payerid", + "value": "5824473976" + }, + { + "type": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBIdentifierType", + "code": "uc" + } + ] + }, + "system": "https://hl7.org/fhir/sid/claimid", + "value": "1234094" + } + ], + "status": "active", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/claim-type", + "code": "professional" + } + ] + }, + "use": "claim", + "patient": { + "reference": "Patient/d1a47e2c-509b-e326-deab-597e3f598ca5" + }, + "billablePeriod": { + "start": "2017-01-08", + "end": "2017-01-08" + }, + "created": "2017-01-11T00:00:00-08:00", + "insurer": { + "reference": "Organization/5954a17b-0779-334c-4f1c-e894e45d15fb" + }, + "provider": { + "reference": "Organization/68ae4f74-afdc-6242-c50e-02ef776d8e5d" + }, + "payee": { + "type": { + "coding": [ + { + "system": 
"http://terminology.hl7.org/CodeSystem/payeetype", + "code": "provider" + } + ], + "text": "Claim paid to VENDOR" + }, + "party": { + "reference": "Organization/68ae4f74-afdc-6242-c50e-02ef776d8e5d" + } + }, + "outcome": "complete", + "disposition": "PAID", + "careTeam": [ + { + "sequence": 1, + "provider": { + "reference": "Practitioner/23eccc61-ab67-bf8a-e464-6260f7989556" + }, + "responsible": false, + "role": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/claimcareteamrole", + "code": "primary" + } + ] + } + }, + { + "sequence": 2, + "provider": { + "reference": "Practitioner/dbbc9a06-f685-b481-d739-133755af138e" + }, + "responsible": false, + "role": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBClaimCareTeamRole", + "code": "referring" + } + ] + } + }, + { + "sequence": 3, + "provider": { + "reference": "Practitioner/39b9250c-0d01-cbb0-ea89-0de9c74af511" + }, + "responsible": true, + "role": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBClaimCareTeamRole", + "code": "performing" + } + ] + } + } + ], + "supportingInfo": [ + { + "sequence": 1, + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBSupportingInfoType", + "code": "clmrecvddate" + } + ] + }, + "timingDate": "2017-01-11" + }, + { + "sequence": 2, + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBSupportingInfoType", + "code": "billingnetworkcontractingstatus" + } + ] + }, + "code": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBPayerAdjudicationStatus", + "code": "other" + } + ] + } + } + ], + "diagnosis": [ + { + "sequence": 4, + "diagnosisCodeableConcept": { + "coding": [ + { + "system": "http://hl7.org/fhir/sid/icd-10-cm", + "code": "I27.2", + "display": "Other secondary pulmonary hypertension" + } + ], + "text": "Other secondary pulmonary hypertension" + } + } + ], + "procedure": [ + { + 
"sequence": 1, + "date": "2017-01-08T00:00:00-08:00", + "procedureCodeableConcept": { + "coding": [ + { + "system": "http://www.ama-assn.org/go/cpt", + "code": "99233", + "display": "Subsequent hospital care for severe problem" + } + ], + "text": "SBSQ HOSPITAL CARE/DAY 35 MINUTES" + } + } + ], + "insurance": [ + { + "focal": true, + "coverage": { + "reference": "urn:uuid:175dbf4a-7ee2-446d-9938-82eea27871a7" + } + } + ], + "item": [ + { + "sequence": 1, + "diagnosisSequence": [ + 4 + ], + "procedureSequence": [ + 1 + ], + "productOrService": { + "coding": [ + { + "system": "http://www.ama-assn.org/go/cpt", + "code": "99233", + "display": "Subsequent hospital care for severe problem" + } + ], + "text": "SBSQ HOSPITAL CARE/DAY 35 MINUTES" + }, + "servicedPeriod": { + "start": "2017-01-08", + "end": "2017-01-08" + }, + "locationCodeableConcept": { + "coding": [ + { + "system": "https://www.cms.gov/Medicare/Coding/place-of-service-codes/Place_of_Service_Code_Set", + "code": "99" + } + ] + }, + "quantity": { + "value": 1, + "unit": "Units", + "system": "http://unitsofmeasure.org", + "code": "[arb'U]" + }, + "net": { + "value": 317.00, + "currency": "USD" + }, + "adjudication": [ + { + "category": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/adjudication", + "code": "submitted" + } + ] + }, + "amount": { + "value": 317.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/adjudication", + "code": "benefit" + } + ] + }, + "amount": { + "value": 124.69, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/adjudication", + "code": "copay" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/adjudication", + "code": "deductible" + } + ] + }, + "amount": { + "value": 124.69, + "currency": "USD" + } + }, + { + 
"category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", + "code": "coinsurance" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", + "code": "memberliability" + } + ] + }, + "amount": { + "value": 124.69, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", + "code": "noncovered" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", + "code": "priorpayerpaid" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", + "code": "paidtoprovider" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBPayerAdjudicationStatus", + "code": "outofnetwork" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + } + ] + } + ], + "total": [ + { + "category": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/adjudication", + "code": "submitted" + } + ] + }, + "amount": { + "value": 317.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/adjudication", + "code": "benefit" + } + ] + }, + "amount": { + "value": 124.69, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/adjudication", + "code": "copay" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": 
"http://terminology.hl7.org/CodeSystem/adjudication", + "code": "deductible" + } + ] + }, + "amount": { + "value": 124.69, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", + "code": "coinsurance" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", + "code": "memberliability" + } + ] + }, + "amount": { + "value": 124.69, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", + "code": "noncovered" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", + "code": "priorpayerpaid" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + }, + { + "category": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBAdjudication", + "code": "paidtoprovider" + } + ] + }, + "amount": { + "value": 0.00, + "currency": "USD" + } + } + ], + "payment": { + "date": "2017-02-02", + "amount": { + "value": 0.00, + "currency": "USD" + } + } + }, + "request": { + "method": "PUT", + "url": "ExplanationOfBenefit?identifier=5824473976" + } + }, + { + "resource": { + "resourceType": "Patient", + "id": "d1a47e2c-509b-e326-deab-597e3f598ca5", + "meta": { + "lastUpdated": "2021-06-30", + "profile": [ + "http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient" + ] + }, + "identifier": [ + { + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "MR" + } + ] + }, + "system": "https://example.org/front-door", + "value": "412563524-CO" + } + ], + "name": [ + { + "use": "usual", + "text": "HYOHWAN MGUIRRE", + "family": "MGUIRRE", + "given": [ + "HYOHWAN" + ] + } + ], + 
"telecom": [ + { + "system": "phone", + "value": "719-654-0220", + "use": "home" + } + ], + "gender": "unknown", + "birthDate": "1958-05-12", + "address": [ + { + "use": "home", + "type": "postal", + "line": [ + "20360 East 45Th Court", + "PO Box 523" + ], + "city": "COLORADO SPRINGS", + "postalCode": "80922-4166" + } + ] + }, + "request": { + "method": "PUT", + "url": "Patient/d1a47e2c-509b-e326-deab-597e3f598ca5" + } + }, + { + "fullUrl": "urn:uuid:175dbf4a-7ee2-446d-9938-82eea27871a7", + "resource": { + "resourceType": "Coverage", + "meta": { + "lastUpdated": "2021-06-30", + "profile": [ + "http://hl7.org/fhir/us/carin-bb/StructureDefinition/C4BB-Coverage" + ] + }, + "identifier": [ + { + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "FILL" + } + ] + }, + "system": "https://hl7.org/fhir/sid/coverageid", + "value": "412563524-CO-80001" + } + ], + "status": "active", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v3-ActCode", + "code": "HMO", + "display": "health maintenance organization policy" + } + ], + "text": "HMO - HMO COMMERCIAL-HDHP-Signature" + }, + "subscriberId": "412563524", + "beneficiary": { + "reference": "Patient/d1a47e2c-509b-e326-deab-597e3f598ca5" + }, + "relationship": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/subscriber-relationship", + "code": "self", + "display": "Self" + } + ], + "text": "The Beneficiary is the Subscriber" + }, + "period": { + "start": "2016-01-01", + "end": "2017-07-01" + }, + "payor": [ + { + "reference": "Organization/5954a17b-0779-334c-4f1c-e894e45d15fb" + } + ], + "class": [ + { + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/coverage-class", + "code": "group", + "display": "Group" + } + ], + "text": "An employee group" + }, + "value": "80001", + "name": "CS BRZ HDHP 5500/30%/0 ONX S-NON-MEDICARE" + } + ] + }, + "request": { + "method": "PUT", + "url": 
"Coverage?identifier=412563524-CO-80001" + } + }, + { + "resource": { + "resourceType": "Organization", + "id": "68ae4f74-afdc-6242-c50e-02ef776d8e5d", + "meta": { + "lastUpdated": "2021-06-30", + "profile": [ + "http://hl7.org/fhir/us/carin-bb/StructureDefinition/C4BB-Organization" + ] + }, + "identifier": [ + { + "type": { + "coding": [ + { + "system": "http://hl7.org/fhir/us/carin-bb/CodeSystem/C4BBIdentifierType", + "code": "npi" + } + ] + }, + "system": "http://hl7.org/fhir/sid/us-npi", + "value": "1407833767" + }, + { + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "TAX" + } + ] + }, + "system": "urn:oid:2.16.840.1.113883.4.4" + } + ], + "active": true, + "type": [ + { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/organization-type", + "code": "prov" + } + ] + } + ], + "name": "PIKES PEAK NEPHROLOGY ASSOCIATES PC", + "address": [ + { + "use": "work", + "type": "physical", + "line": [ + "1914 LELARAY STREET" + ], + "city": "COLORADO SPRINGS", + "postalCode": "80909", + "country": "USA" + } + ] + }, + "request": { + "method": "PUT", + "url": "Organization/68ae4f74-afdc-6242-c50e-02ef776d8e5d" + } + }, + { + "resource": { + "resourceType": "Organization", + "id": "5954a17b-0779-334c-4f1c-e894e45d15fb", + "meta": { + "lastUpdated": "2021-06-30", + "profile": [ + "http://hl7.org/fhir/us/carin-bb/StructureDefinition/C4BB-Organization" + ] + }, + "identifier": [ + { + "use": "usual", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "FILL" + } + ] + }, + "system": "https://hl7.org/fhir/sid/organizationid", + "value": "NATLTAP CO-KFHP-PAY-CO" + } + ], + "active": true, + "type": [ + { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/organization-type", + "code": "pay", + "display": "Payer" + } + ] + } + ], + "name": "KAISER FOUNDATION HEALTHPLAN, INC", + "telecom": [ + { + "system": "phone", + "value": "1-800-382-4661", + 
"use": "work" + } + ], + "address": [ + { + "use": "work", + "type": "postal", + "line": [ + "NATIONAL CLAIMS ADMINISTRATION COLORADO", + "PO Box 629028" + ], + "city": "El Dorado Hills", + "state": "CA", + "postalCode": "95762-9028" + } + ] + }, + "request": { + "method": "PUT", + "url": "Organization/5954a17b-0779-334c-4f1c-e894e45d15fb" + } + }, + { + "resource": { + "resourceType": "Practitioner", + "id": "23eccc61-ab67-bf8a-e464-6260f7989556", + "meta": { + "lastUpdated": "2021-06-30", + "profile": [ + "http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner" + ] + }, + "identifier": [ + { + "use": "usual", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "NPI" + } + ] + }, + "system": "http://hl7.org/fhir/sid/us-npi", + "value": "1497983654" + } + ], + "name": [ + { + "use": "usual", + "text": "CASSIDY, HEATHER M (MD)", + "family": "CASSIDY", + "given": [ + "HEATHER" + ], + "suffix": [ + "MD" + ] + } + ], + "address": [ + { + "use": "work", + "line": [ + "Briargate", + "1405 Briargate Pkwy #141" + ], + "city": "Colorado Springs", + "postalCode": "80920" + } + ] + }, + "request": { + "method": "PUT", + "url": "Practitioner/23eccc61-ab67-bf8a-e464-6260f7989556" + } + }, + { + "resource": { + "resourceType": "Practitioner", + "id": "dbbc9a06-f685-b481-d739-133755af138e", + "meta": { + "lastUpdated": "2021-06-30", + "profile": [ + "http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner" + ] + }, + "identifier": [ + { + "use": "usual", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "NPI" + } + ] + }, + "system": "http://hl7.org/fhir/sid/us-npi", + "value": "1568467280" + }, + { + "use": "usual", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "TAX" + } + ] + }, + "system": "urn:oid:2.16.840.1.113883.4.4", + "value": "311669909" + } + ], + "name": [ + { + "use": "usual", + "text": 
"MOHNSSEN, STEVEN R (MD)", + "family": "MOHNSSEN", + "given": [ + "STEVEN" + ], + "suffix": [ + "MD" + ] + } + ], + "address": [ + { + "use": "work", + "line": [ + "1725 E Boulder St", + "Ste 204" + ], + "city": "Colorado Springs", + "postalCode": "80909" + } + ] + }, + "request": { + "method": "PUT", + "url": "Practitioner/dbbc9a06-f685-b481-d739-133755af138e" + } + }, + { + "resource": { + "resourceType": "Practitioner", + "id": "39b9250c-0d01-cbb0-ea89-0de9c74af511", + "meta": { + "lastUpdated": "2021-06-30", + "profile": [ + "http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner" + ] + }, + "identifier": [ + { + "use": "usual", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "NPI" + } + ] + }, + "system": "http://hl7.org/fhir/sid/us-npi", + "value": "1679605265" + }, + { + "use": "usual", + "type": { + "coding": [ + { + "system": "http://terminology.hl7.org/CodeSystem/v2-0203", + "code": "TAX" + } + ] + }, + "system": "urn:oid:2.16.840.1.113883.4.4", + "value": "840629252" + } + ], + "name": [ + { + "use": "usual", + "text": "ROSS, MICHAEL D (MD)", + "family": "ROSS", + "given": [ + "MICHAEL" + ], + "suffix": [ + "MD" + ] + } + ], + "address": [ + { + "use": "work", + "line": [ + "1914 Lelaray St" + ], + "city": "Colorado Springs", + "postalCode": "80909" + } + ] + }, + "request": { + "method": "PUT", + "url": "Practitioner/39b9250c-0d01-cbb0-ea89-0de9c74af511" + } + } + ] } diff --git a/hapi-fhir-jpaserver-batch/pom.xml b/hapi-fhir-jpaserver-batch/pom.xml index 86a71548d81..fa552c7e6f3 100644 --- a/hapi-fhir-jpaserver-batch/pom.xml +++ b/hapi-fhir-jpaserver-batch/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-cql/pom.xml b/hapi-fhir-jpaserver-cql/pom.xml index fa531134acf..4d12fe6bed7 100644 --- a/hapi-fhir-jpaserver-cql/pom.xml +++ b/hapi-fhir-jpaserver-cql/pom.xml @@ -7,7 
+7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index cb849b120a7..83b1b55e1f0 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-migrate/pom.xml b/hapi-fhir-jpaserver-migrate/pom.xml index 2df96164ac4..c04559d2664 100644 --- a/hapi-fhir-jpaserver-migrate/pom.xml +++ b/hapi-fhir-jpaserver-migrate/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index 63e2f0101db..1f41e99d0a4 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 1eb524292eb..5abb5632a67 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java index c18bb8484fb..5b6a2f69eb6 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java @@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.searchparam; import ca.uhn.fhir.context.FhirContext; 
import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; import ca.uhn.fhir.model.api.IQueryParameterAnd; @@ -161,11 +162,15 @@ public class MatchUrlService { return ReflectionUtil.newInstance(clazz); } - public ResourceSearch getResourceSearch(String theUrl) { + public ResourceSearch getResourceSearch(String theUrl, RequestPartitionId theRequestPartitionId) { RuntimeResourceDefinition resourceDefinition; resourceDefinition = UrlUtil.parseUrlResourceType(myFhirContext, theUrl); SearchParameterMap searchParameterMap = translateMatchUrl(theUrl, resourceDefinition); - return new ResourceSearch(resourceDefinition, searchParameterMap); + return new ResourceSearch(resourceDefinition, searchParameterMap, theRequestPartitionId); + } + + public ResourceSearch getResourceSearch(String theUrl) { + return getResourceSearch(theUrl, null); } public abstract static class Flag { diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/ResourceSearch.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/ResourceSearch.java index 01b53718c04..2d879bac1eb 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/ResourceSearch.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/ResourceSearch.java @@ -21,17 +21,21 @@ package ca.uhn.fhir.jpa.searchparam; */ import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import org.hl7.fhir.instance.model.api.IBaseResource; /** - * A resource type along with a search parameter map. Everything you need to perform a search! + * A resource type along with a search parameter map and partition id. Everything you need to perform a search! 
*/ public class ResourceSearch { private final RuntimeResourceDefinition myRuntimeResourceDefinition; private final SearchParameterMap mySearchParameterMap; + private final RequestPartitionId myRequestPartitionId; - public ResourceSearch(RuntimeResourceDefinition theRuntimeResourceDefinition, SearchParameterMap theSearchParameterMap) { + public ResourceSearch(RuntimeResourceDefinition theRuntimeResourceDefinition, SearchParameterMap theSearchParameterMap, RequestPartitionId theRequestPartitionId) { myRuntimeResourceDefinition = theRuntimeResourceDefinition; mySearchParameterMap = theSearchParameterMap; + myRequestPartitionId = theRequestPartitionId; } public RuntimeResourceDefinition getRuntimeResourceDefinition() { @@ -49,4 +53,12 @@ public class ResourceSearch { public boolean isDeleteExpunge() { return mySearchParameterMap.isDeleteExpunge(); } + + public Class getResourceType() { + return myRuntimeResourceDefinition.getImplementingClass(); + } + + public RequestPartitionId getRequestPartitionId() { + return myRequestPartitionId; + } } diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index 26a70d3f837..5d5ea7ce06b 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index a0a3dddba47..1852210925d 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index eafc3940eb2..9b902d9c5b4 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ 
b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index cf82e372485..833378ffb1c 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml index 92fc3dd6623..cdba7e64de4 100644 --- a/hapi-fhir-server-openapi/pom.xml +++ b/hapi-fhir-server-openapi/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index f85703de5b2..a91187eb33a 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IBundleProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IBundleProvider.java index ab0a7cef6c1..5151818fca6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IBundleProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IBundleProvider.java @@ -10,6 +10,7 @@ import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Date; import java.util.List; +import java.util.stream.Collectors; /* * #%L @@ -206,7 +207,7 @@ public interface IBundleProvider { } /** - * Returns the value of {@link #size()} and throws a {@link NullPointerException} of it is null + * @return the value of {@link #size()} and throws a {@link NullPointerException} of it is null */ default int sizeOrThrowNpe() { Integer retVal = size(); @@ -214,4 +215,10 @@ public interface IBundleProvider { 
return retVal; } + /** + * @return the list of ids of all resources in the bundle + */ + default List getAllResourceIds() { + return getAllResources().stream().map(resource -> resource.getIdElement().getIdPart()).collect(Collectors.toList()); + } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IDeleteExpungeJobSubmitter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IDeleteExpungeJobSubmitter.java index 740e57cf3d5..b890f167bc8 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IDeleteExpungeJobSubmitter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IDeleteExpungeJobSubmitter.java @@ -20,19 +20,6 @@ package ca.uhn.fhir.rest.api.server.storage; * #L% */ -import ca.uhn.fhir.rest.api.server.RequestDetails; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersInvalidException; - -import java.util.List; - -public interface IDeleteExpungeJobSubmitter { - /** - * @param theBatchSize For each pass, when synchronously searching for resources, limit the number of matching resources to this number - * @param theTenantId The tenant to perform the searches on - * @param theUrlsToDeleteExpunge A list of strings of the form "/Patient?active=true" - * @return The Spring Batch JobExecution that was started to run this batch job - * @throws JobParametersInvalidException - */ - JobExecution submitJob(Integer theBatchSize, RequestDetails theRequest, List theUrlsToDeleteExpunge) throws JobParametersInvalidException; +// Tag interface for Spring auto-wiring +public interface IDeleteExpungeJobSubmitter extends IMultiUrlJobSubmitter { } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IMultiUrlJobSubmitter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IMultiUrlJobSubmitter.java new file mode 100644 index 00000000000..f39ef1c54f2 --- /dev/null +++ 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IMultiUrlJobSubmitter.java @@ -0,0 +1,37 @@ +package ca.uhn.fhir.rest.api.server.storage; + +/*- + * #%L + * HAPI FHIR - Server Framework + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.rest.api.server.RequestDetails; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParametersInvalidException; + +import java.util.List; + +public interface IMultiUrlJobSubmitter { + /** + * @param theBatchSize For each pass, when synchronously searching for resources, limit the number of matching resources to this number + * @param theUrlsToProcess A list of strings of the form "/Patient?active=true" + * @return The Spring Batch JobExecution that was started to run this batch job + * @throws JobParametersInvalidException + */ + JobExecution submitJob(Integer theBatchSize, List theUrlsToProcess, RequestDetails theRequest) throws JobParametersInvalidException; +} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IReindexJobSubmitter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IReindexJobSubmitter.java new file mode 100644 index 00000000000..52373a8ffd4 --- /dev/null +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IReindexJobSubmitter.java @@ -0,0 +1,30 @@ +package 
ca.uhn.fhir.rest.api.server.storage; + +/*- + * #%L + * HAPI FHIR - Server Framework + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.rest.api.server.RequestDetails; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParametersInvalidException; + +// Tag interface for Spring wiring +public interface IReindexJobSubmitter extends IMultiUrlJobSubmitter { + JobExecution submitEverythingJob(Integer theBatchSize, RequestDetails theRequest) throws JobParametersInvalidException; +} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/BaseMultiUrlProcessor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/BaseMultiUrlProcessor.java new file mode 100644 index 00000000000..40c7564410a --- /dev/null +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/BaseMultiUrlProcessor.java @@ -0,0 +1,65 @@ +package ca.uhn.fhir.rest.server.provider; + +/*- + * #%L + * HAPI FHIR - Server Framework + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.IMultiUrlJobSubmitter; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.util.ParametersUtil; +import org.hl7.fhir.instance.model.api.IBaseParameters; +import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.jetbrains.annotations.Nullable; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParametersInvalidException; + +import java.math.BigDecimal; +import java.util.List; + +public class BaseMultiUrlProcessor { + protected final FhirContext myFhirContext; + private final IMultiUrlJobSubmitter myMultiUrlProcessorJobSubmitter; + + public BaseMultiUrlProcessor(FhirContext theFhirContext, IMultiUrlJobSubmitter theMultiUrlProcessorJobSubmitter) { + myMultiUrlProcessorJobSubmitter = theMultiUrlProcessorJobSubmitter; + myFhirContext = theFhirContext; + } + + protected IBaseParameters processUrls(List theUrlsToProcess, Integer theBatchSize, RequestDetails theRequestDetails) { + try { + JobExecution jobExecution = myMultiUrlProcessorJobSubmitter.submitJob(theBatchSize, theUrlsToProcess, theRequestDetails); + IBaseParameters retval = ParametersUtil.newInstance(myFhirContext); + ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID, jobExecution.getJobId()); + return retval; + } catch (JobParametersInvalidException e) { + throw new 
InvalidRequestException("Invalid job parameters: " + e.getMessage(), e); + } + } + + @Nullable + protected Integer getBatchSize(IPrimitiveType theBatchSize) { + Integer batchSize = null; + if (theBatchSize != null && !theBatchSize.isEmpty()) { + batchSize = theBatchSize.getValue().intValue(); + } + return batchSize; + } +} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/DeleteExpungeProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/DeleteExpungeProvider.java index a7530fe12c8..ee2feaee91d 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/DeleteExpungeProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/DeleteExpungeProvider.java @@ -25,25 +25,16 @@ import ca.uhn.fhir.rest.annotation.Operation; import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.util.ParametersUtil; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersInvalidException; import java.math.BigDecimal; import java.util.List; import java.util.stream.Collectors; -public class DeleteExpungeProvider { - private final IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter; - - private final FhirContext myFhirContext; - +public class DeleteExpungeProvider extends BaseMultiUrlProcessor { public DeleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) { - myDeleteExpungeJobSubmitter = theDeleteExpungeJobSubmitter; - myFhirContext = theFhirContext; + super(theFhirContext, theDeleteExpungeJobSubmitter); } @Operation(name = ProviderConstants.OPERATION_DELETE_EXPUNGE, idempotent = false) 
@@ -52,18 +43,7 @@ public class DeleteExpungeProvider { @OperationParam(name = ProviderConstants.OPERATION_DELETE_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType theBatchSize, RequestDetails theRequestDetails ) { - try { - List urls = theUrlsToDeleteExpunge.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()); - Integer batchSize = null; - if (theBatchSize != null && !theBatchSize.isEmpty()) { - batchSize = theBatchSize.getValue().intValue(); - } - JobExecution jobExecution = myDeleteExpungeJobSubmitter.submitJob(batchSize, theRequestDetails, urls); - IBaseParameters retval = ParametersUtil.newInstance(myFhirContext); - ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID, jobExecution.getJobId()); - return retval; - } catch (JobParametersInvalidException e) { - throw new InvalidRequestException("Invalid job parameters: " + e.getMessage(), e); - } + List urls = theUrlsToDeleteExpunge.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()); + return super.processUrls(urls, getBatchSize(theBatchSize), theRequestDetails); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java index 45891fcac2b..ffbe7d04e64 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java @@ -147,4 +147,29 @@ public class ProviderConstants { * The Spring Batch job id of the delete expunge job created by a $delete-expunge operation */ public static final String OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID = "jobId"; + + /** + * Operation name for the $delete-expunge operation + */ + public static final String OPERATION_REINDEX = "$reindex"; + + /** + * url of resources to delete for the $delete-expunge operation + 
*/ + public static final String OPERATION_REINDEX_PARAM_URL = "url"; + + /** + * Number of resources to delete at a time for the $delete-expunge operation + */ + public static final String OPERATION_REINDEX_PARAM_BATCH_SIZE = "batchSize"; + + /** + * Whether all resource types should be reindexed + */ + public static final String OPERATION_REINDEX_PARAM_EVERYTHING = "everything"; + + /** + * The Spring Batch job id of the delete expunge job created by a $delete-expunge operation + */ + public static final String OPERATION_REINDEX_RESPONSE_JOB_ID = "jobId"; } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ReindexProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ReindexProvider.java new file mode 100644 index 00000000000..587e5caf999 --- /dev/null +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ReindexProvider.java @@ -0,0 +1,79 @@ +package ca.uhn.fhir.rest.server.provider; + +/*- + * #%L + * HAPI FHIR - Server Framework + * %% + * Copyright (C) 2014 - 2021 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.rest.annotation.Operation; +import ca.uhn.fhir.rest.annotation.OperationParam; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.IReindexJobSubmitter; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.util.ParametersUtil; +import org.hl7.fhir.instance.model.api.IBaseParameters; +import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.jetbrains.annotations.Nullable; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParametersInvalidException; + +import java.math.BigDecimal; +import java.util.List; +import java.util.stream.Collectors; + +public class ReindexProvider extends BaseMultiUrlProcessor { + private final IReindexJobSubmitter myReindexJobSubmitter; + + public ReindexProvider(FhirContext theFhirContext, IReindexJobSubmitter theReindexJobSubmitter) { + super(theFhirContext, theReindexJobSubmitter); + myReindexJobSubmitter = theReindexJobSubmitter; + } + + @Operation(name = ProviderConstants.OPERATION_REINDEX, idempotent = false) + public IBaseParameters Reindex( + @OperationParam(name = ProviderConstants.OPERATION_REINDEX_PARAM_URL, typeName = "string", min = 0, max = 1) List> theUrlsToReindex, + @OperationParam(name = ProviderConstants.OPERATION_REINDEX_PARAM_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType theBatchSize, + @OperationParam(name = ProviderConstants.OPERATION_REINDEX_PARAM_EVERYTHING, typeName = "boolean", min = 0, max = 1) IPrimitiveType theEverything, + RequestDetails theRequestDetails + ) { + Boolean everything = theEverything != null && theEverything.getValue(); + @Nullable Integer batchSize = getBatchSize(theBatchSize); + if (everything) { + return processEverything(batchSize, theRequestDetails); + } else if (theUrlsToReindex != null && !theUrlsToReindex.isEmpty()) { + List urls = 
theUrlsToReindex.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()); + return super.processUrls(urls, batchSize, theRequestDetails); + } else { + throw new InvalidRequestException(ProviderConstants.OPERATION_REINDEX + " must specify either everything=true or provide at least one value for " + ProviderConstants.OPERATION_REINDEX_PARAM_URL); + } + } + + private IBaseParameters processEverything(Integer theBatchSize, RequestDetails theRequestDetails) { + try { + JobExecution jobExecution = myReindexJobSubmitter.submitEverythingJob(theBatchSize, theRequestDetails); + IBaseParameters retval = ParametersUtil.newInstance(myFhirContext); + ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID, jobExecution.getJobId()); + return retval; + } catch (JobParametersInvalidException e) { + throw new InvalidRequestException("Invalid job parameters: " + e.getMessage(), e); + } + } + + +} diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index b36ce8feaca..c7afc583561 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index 704dbbf5fc7..b24fea10b4f 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.5.0-PRE7-SNAPSHOT + 
5.5.0-PRE8-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index 8cc8cc8cab5..2e90f42c2fe 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT hapi-fhir-spring-boot-sample-client-okhttp diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index 18115eb2b87..8a0f3eea1ea 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT hapi-fhir-spring-boot-sample-server-jersey diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index f3beb03fa68..4502bab171f 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT hapi-fhir-spring-boot-samples diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index a9f45157a3b..3eefdc44e57 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ 
b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index 4473e7e4c6d..cb8df8fedce 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index fb9d06a31df..b83f33489d7 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index d0301a3b43c..7cdb806aaa5 100644 --- a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index e38052d8518..02594a84d1e 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index efd6c7a915d..2336d6b6424 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index c89f53634fd..7eefade758f 100644 --- a/hapi-fhir-structures-r4/pom.xml 
+++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/provider/BatchProviderTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/provider/BatchProviderTest.java new file mode 100644 index 00000000000..a5730264c75 --- /dev/null +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/provider/BatchProviderTest.java @@ -0,0 +1,201 @@ +package ca.uhn.fhir.rest.server.provider; + +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter; +import ca.uhn.fhir.rest.api.server.storage.IReindexJobSubmitter; +import ca.uhn.fhir.rest.server.BaseR4ServerTest; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import org.hl7.fhir.r4.model.BooleanType; +import org.hl7.fhir.r4.model.DecimalType; +import org.hl7.fhir.r4.model.Parameters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersInvalidException; + +import javax.annotation.Nonnull; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +public class BatchProviderTest extends BaseR4ServerTest { + public static final long TEST_JOB_ID = 123L; 
+ public static final String TEST_JOB_NAME = "jobName"; + private static final Logger ourLog = LoggerFactory.getLogger(BatchProviderTest.class); + private final MyMultiUrlJobSubmitter myDeleteExpungeJobSubmitter = new MyMultiUrlJobSubmitter(ProviderConstants.OPERATION_DELETE_EXPUNGE); + private final MyMultiUrlJobSubmitter myReindexJobSubmitter = new MyMultiUrlJobSubmitter(ProviderConstants.OPERATION_REINDEX); + private Parameters myReturnParameters; + + @BeforeEach + public void reset() { + myReturnParameters = new Parameters(); + myReturnParameters.addParameter("success", true); + myDeleteExpungeJobSubmitter.reset(); + myReindexJobSubmitter.reset(); + } + + @Test + public void testDeleteExpunge() throws Exception { + // setup + Parameters input = new Parameters(); + String url1 = "Observation?status=active"; + String url2 = "Patient?active=false"; + Integer batchSize = 2401; + input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, url1); + input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, url2); + input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new DecimalType(batchSize)); + + ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); + + DeleteExpungeProvider provider = new DeleteExpungeProvider(myCtx, myDeleteExpungeJobSubmitter); + startServer(provider); + + Parameters response = myClient + .operation() + .onServer() + .named(ProviderConstants.OPERATION_DELETE_EXPUNGE) + .withParameters(input) + .execute(); + + ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response)); + DecimalType jobId = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID); + assertEquals(TEST_JOB_ID, jobId.getValue().longValue()); + assertThat(myDeleteExpungeJobSubmitter.calledWithUrls, hasSize(2)); + assertEquals(url1, myDeleteExpungeJobSubmitter.calledWithUrls.get(0)); + assertEquals(url2, myDeleteExpungeJobSubmitter.calledWithUrls.get(1)); 
+ assertEquals(batchSize, myDeleteExpungeJobSubmitter.calledWithBatchSize); + assertNotNull(myDeleteExpungeJobSubmitter.calledWithRequestDetails); + assertFalse(myDeleteExpungeJobSubmitter.everything); + } + + @Test + public void testReindex() throws Exception { + // setup + Parameters input = new Parameters(); + String url1 = "Observation?status=active"; + String url2 = "Patient?active=false"; + Integer batchSize = 2401; + input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_URL, url1); + input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_URL, url2); + input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_BATCH_SIZE, new DecimalType(batchSize)); + + ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); + + ReindexProvider provider = new ReindexProvider(myCtx, myReindexJobSubmitter); + startServer(provider); + + Parameters response = myClient + .operation() + .onServer() + .named(ProviderConstants.OPERATION_REINDEX) + .withParameters(input) + .execute(); + + ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response)); + DecimalType jobId = (DecimalType) response.getParameter(ProviderConstants.OPERATION_REINDEX_RESPONSE_JOB_ID); + assertEquals(TEST_JOB_ID, jobId.getValue().longValue()); + assertThat(myReindexJobSubmitter.calledWithUrls, hasSize(2)); + assertEquals(url1, myReindexJobSubmitter.calledWithUrls.get(0)); + assertEquals(url2, myReindexJobSubmitter.calledWithUrls.get(1)); + assertEquals(batchSize, myReindexJobSubmitter.calledWithBatchSize); + assertNotNull(myReindexJobSubmitter.calledWithRequestDetails); + assertFalse(myReindexJobSubmitter.everything); + + // bad params + input = new Parameters(); + batchSize = 2401; + input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_BATCH_SIZE, new DecimalType(batchSize)); + + ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); + try { + response = myClient + .operation() + .onServer() + 
.named(ProviderConstants.OPERATION_REINDEX) + .withParameters(input) + .execute(); + fail(); + } catch (InvalidRequestException e) { + assertEquals("HTTP 400 Bad Request: $reindex must specify either everything=true or provide at least one value for url", e.getMessage()); + } + } + + @Test + public void testReindexEverything() throws Exception { + // setup + Parameters input = new Parameters(); + Integer batchSize = 2401; + input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_BATCH_SIZE, new DecimalType(batchSize)); + input.addParameter(ProviderConstants.OPERATION_REINDEX_PARAM_EVERYTHING, new BooleanType(true)); + + ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); + + ReindexProvider provider = new ReindexProvider(myCtx, myReindexJobSubmitter); + startServer(provider); + + Parameters response = myClient + .operation() + .onServer() + .named(ProviderConstants.OPERATION_REINDEX) + .withParameters(input) + .execute(); + + ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response)); + DecimalType jobId = (DecimalType) response.getParameter(ProviderConstants.OPERATION_REINDEX_RESPONSE_JOB_ID); + assertEquals(TEST_JOB_ID, jobId.getValue().longValue()); + assertThat(myReindexJobSubmitter.calledWithUrls, hasSize(0)); + assertEquals(batchSize, myReindexJobSubmitter.calledWithBatchSize); + assertNotNull(myReindexJobSubmitter.calledWithRequestDetails); + assertTrue(myReindexJobSubmitter.everything); + } + + private class MyMultiUrlJobSubmitter implements IReindexJobSubmitter, IDeleteExpungeJobSubmitter { + public final String operationName; + public Integer calledWithBatchSize; + public RequestDetails calledWithRequestDetails; + public List calledWithUrls; + public boolean everything; + + public MyMultiUrlJobSubmitter(String theOperationName) { + operationName = theOperationName; + } + + @Override + public JobExecution submitJob(Integer theBatchSize, List theUrlsToProcess, RequestDetails 
theRequestDetails) { + calledWithBatchSize = theBatchSize; + calledWithRequestDetails = theRequestDetails; + calledWithUrls = theUrlsToProcess; + everything = false; + return buildJobExecution(); + } + + @Nonnull + private JobExecution buildJobExecution() { + JobInstance instance = new JobInstance(TEST_JOB_ID, TEST_JOB_NAME); + return new JobExecution(instance, new JobParameters()); + } + + public void reset() { + calledWithUrls = new ArrayList<>(); + } + + @Override + public JobExecution submitEverythingJob(Integer theBatchSize, RequestDetails theRequestDetails) throws JobParametersInvalidException { + calledWithBatchSize = theBatchSize; + calledWithRequestDetails = theRequestDetails; + everything = true; + return buildJobExecution(); + } + } +} diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/provider/DeleteExpungeProviderTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/provider/DeleteExpungeProviderTest.java deleted file mode 100644 index b583c2ae630..00000000000 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/provider/DeleteExpungeProviderTest.java +++ /dev/null @@ -1,87 +0,0 @@ -package ca.uhn.fhir.rest.server.provider; - -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter; -import ca.uhn.fhir.rest.server.BaseR4ServerTest; -import org.hl7.fhir.r4.model.DecimalType; -import org.hl7.fhir.r4.model.Parameters; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; - -import java.util.ArrayList; -import java.util.List; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.hasSize; -import static org.junit.jupiter.api.Assertions.assertEquals; -import 
static org.junit.jupiter.api.Assertions.assertNotNull; - -public class DeleteExpungeProviderTest extends BaseR4ServerTest { - private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeProviderTest.class); - private final MyDeleteExpungeJobSubmitter myTestJobSubmitter = new MyDeleteExpungeJobSubmitter(); - private Parameters myReturnParameters; - - @BeforeEach - public void reset() { - myReturnParameters = new Parameters(); - myReturnParameters.addParameter("success", true); - myTestJobSubmitter.reset(); - } - - @Test - public void testDeleteExpunge() throws Exception { - // setup - Parameters input = new Parameters(); - String url1 = "Observation?status=active"; - String url2 = "Patient?active=false"; - Integer batchSize = 2401; - input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, url1); - input.addParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, url2); - input.addParameter(ProviderConstants.OPERATION_DELETE_BATCH_SIZE, new DecimalType(batchSize)); - - ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); - - DeleteExpungeProvider provider = new DeleteExpungeProvider(myCtx, myTestJobSubmitter); - startServer(provider); - - Parameters response = myClient - .operation() - .onServer() - .named(ProviderConstants.OPERATION_DELETE_EXPUNGE) - .withParameters(input) - .execute(); - - ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(response)); - DecimalType jobId = (DecimalType) response.getParameter(ProviderConstants.OPERATION_DELETE_EXPUNGE_RESPONSE_JOB_ID); - assertEquals(123L, jobId.getValue().longValue()); - assertThat(myTestJobSubmitter.calledWithUrls, hasSize(2)); - assertEquals(url1, myTestJobSubmitter.calledWithUrls.get(0)); - assertEquals(url2, myTestJobSubmitter.calledWithUrls.get(1)); - assertEquals(batchSize, myTestJobSubmitter.calledWithBatchSize); - assertNotNull(myTestJobSubmitter.calledWithRequestDetails); - } - - private class MyDeleteExpungeJobSubmitter 
implements IDeleteExpungeJobSubmitter { - public Integer calledWithBatchSize; - public RequestDetails calledWithRequestDetails; - public List calledWithUrls; - - @Override - public JobExecution submitJob(Integer theBatchSize, RequestDetails theRequestDetails, List theUrlsToExpungeDelete) { - calledWithBatchSize = theBatchSize; - calledWithRequestDetails = theRequestDetails; - calledWithUrls = theUrlsToExpungeDelete; - JobInstance instance = new JobInstance(123L, "jobName"); - return new JobExecution(instance, new JobParameters()); - } - - public void reset() { - calledWithUrls = new ArrayList<>(); - } - } -} diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index 4f8d961d1c2..7433f736ca4 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index f713ecc36f7..f8e8123360d 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index a36dfe3bfbd..aadf42b3288 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index 919393e2a02..5ca303426af 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index d1b6baee0bf..fd686b16ab9 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index 0d1669f88d7..c744cde1e55 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 588091ec831..5abee688c58 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index 63a22dbf8f7..a7dfaab543c 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index 94bfad3c883..1bb96cf32a0 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index fdc720e7155..925d1e571b0 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 
5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml @@ -58,37 +58,37 @@ ca.uhn.hapi.fhir hapi-fhir-structures-dstu3 - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-hl7org-dstu2 - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-r4 - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-structures-r5 - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-dstu2 - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-dstu3 - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ca.uhn.hapi.fhir hapi-fhir-validation-resources-r4 - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT org.apache.velocity diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index 8b526787b72..d146b6d212f 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index 82e14f52e6c..910eeb19a09 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. 
https://hapifhir.io diff --git a/restful-server-example/pom.xml b/restful-server-example/pom.xml index 801090855cd..33b1327122f 100644 --- a/restful-server-example/pom.xml +++ b/restful-server-example/pom.xml @@ -8,7 +8,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../pom.xml diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index 7151713a632..a98aefd37d4 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index 3613124778d..ebf3603865b 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index be1470a93b0..b78b5fb465a 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 5.5.0-PRE7-SNAPSHOT + 5.5.0-PRE8-SNAPSHOT ../../pom.xml