From 4a751cbfc5a43f28bc0f70e765731fcc525ae6cc Mon Sep 17 00:00:00 2001 From: James Agnew Date: Tue, 17 Sep 2019 16:01:35 -0400 Subject: [PATCH] Integrate Bulk Export (#1487) * Start working on subscription processor * Work on new scheduler * Test fixes * Scheduler refactoring * Fix test failure * One more test fix * Updates to scheduler * More scheduler work * Tests now all passing * Ongoing work on export * Ongoing scheduler work * Ongoing testing * Work on export task * Sync master * Ongoing work * Bump xml patch version * Work on provider * Work on bulk * Work on export scheduler * More test fies * More test fixes * Compile fix * Reduce logging * Improve logging * Reuse bulk export jobs * Export provider * Improve logging in bulk export * Work on bulk export service * One more bugfix * Ongoing work on Bulk Data * Add changelog --- .../android/client/GenericClientDstu3IT.java | 4 +- .../java/ca/uhn/fhir/rest/api/Constants.java | 14 + .../ca/uhn/fhir/rest/api/PreferHeader.java | 61 ++ .../uhn/fhir/rest/api/PreferReturnEnum.java | 54 -- .../uhn/fhir/rest/gclient/ICreateTyped.java | 4 +- .../fhir/rest/gclient/IPatchExecutable.java | 4 +- .../fhir/rest/gclient/IUpdateExecutable.java | 4 +- .../main/java/ca/uhn/fhir/util/ArrayUtil.java | 25 + .../main/java/ca/uhn/fhir/util/StopWatch.java | 10 +- hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml | 17 +- .../ca/uhn/fhir/jpa/demo/CommonConfig.java | 20 + .../ca/uhn/fhir/jpa/demo/ContextHolder.java | 20 + .../uhn/fhir/jpa/demo/FhirServerConfig.java | 20 + .../fhir/jpa/demo/FhirServerConfigDstu3.java | 20 + .../uhn/fhir/jpa/demo/FhirServerConfigR4.java | 20 + .../uhn/fhir/jpa/demo/FhirTesterConfig.java | 20 + .../ca/uhn/fhir/jpa/demo/JpaServerDemo.java | 20 + .../okhttp/GenericOkHttpClientDstu2Test.java | 8 +- .../rest/client/apache/ResourceEntity.java | 20 + .../fhir/rest/client/impl/GenericClient.java | 20 +- .../server/AbstractJaxRsPageProvider.java | 4 +- .../server/AbstractJaxRsResourceProvider.java | 5 +- 
.../client/GenericJaxRsClientDstu2Test.java | 8 +- .../client/GenericJaxRsClientDstu3Test.java | 8 +- ...bstractJaxRsResourceProviderDstu3Test.java | 9 +- .../AbstractJaxRsResourceProviderTest.java | 9 +- .../JaxRsPatientProviderDstu3Test.java | 6 +- .../example/JaxRsPatientProviderR4Test.java | 8 +- .../example/JaxRsPatientProviderTest.java | 6 +- hapi-fhir-jpaserver-base/pom.xml | 7 +- .../fhir/jpa/bulk/BulkDataExportProvider.java | 158 +++++ .../fhir/jpa/bulk/BulkDataExportSvcImpl.java | 447 ++++++++++++++ .../fhir/jpa/bulk/BulkExportResponseJson.java | 110 ++++ .../uhn/fhir/jpa/bulk/BulkJobStatusEnum.java | 10 + .../uhn/fhir/jpa/bulk/IBulkDataExportSvc.java | 114 ++++ .../ca/uhn/fhir/jpa/config/BaseConfig.java | 50 +- .../fhir/jpa/dao/BaseHapiFhirResourceDao.java | 14 +- .../java/ca/uhn/fhir/jpa/dao/DaoRegistry.java | 13 +- .../ca/uhn/fhir/jpa/dao/IFhirResourceDao.java | 2 + .../fhir/jpa/dao/TransactionProcessor.java | 9 +- .../dao/data/IBulkExportCollectionDao.java | 34 ++ .../data/IBulkExportCollectionFileDao.java | 28 + .../fhir/jpa/dao/data/IBulkExportJobDao.java | 47 ++ .../ca/uhn/fhir/jpa/dao/data/ISearchDao.java | 8 +- .../fhir/jpa/dao/expunge/ExpungeService.java | 4 - .../fhir/jpa/dao/expunge/PartitionRunner.java | 10 + .../entity/BulkExportCollectionEntity.java | 65 ++ .../BulkExportCollectionFileEntity.java | 33 ++ .../fhir/jpa/entity/BulkExportJobEntity.java | 157 +++++ .../java/ca/uhn/fhir/jpa/entity/Search.java | 20 +- .../sched/AutowiringSpringBeanJobFactory.java | 29 + .../uhn/fhir/jpa/sched/QuartzTableSeeder.java | 18 + .../fhir/jpa/sched/SchedulerServiceImpl.java | 559 ++++++++++++++++++ .../search/StaleSearchDeletingSvcImpl.java | 28 +- .../fhir/jpa/search/WarmSearchDefinition.java | 8 + .../search/cache/BaseSearchCacheSvcImpl.java | 29 +- .../cache/DatabaseSearchCacheSvcImpl.java | 4 +- .../reindex/IResourceReindexingSvc.java | 5 - .../reindex/ResourceReindexingSvcImpl.java | 37 +- .../jpa/search/warm/CacheWarmingSvcImpl.java | 51 +- 
.../jpa/search/warm/ICacheWarmingSvc.java | 3 - .../ISubscriptionTriggeringSvc.java | 2 + .../SubscriptionTriggeringSvcImpl.java | 46 +- .../jpa/term/BaseHapiTerminologySvcImpl.java | 51 +- .../java/ca/uhn/fhir/jpa/util/JsonUtil.java | 75 +++ .../uhn/fhir/jpa/util/ResourceCountCache.java | 96 ++- .../fhir/jpa/util/SingleItemLoadingCache.java | 101 ---- .../java/ca/uhn/fhir/jpa/util/TestUtil.java | 2 +- .../jpa/bulk/BulkDataExportProviderTest.java | 242 ++++++++ .../jpa/bulk/BulkDataExportSvcImplR4Test.java | 254 ++++++++ .../java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java | 4 +- .../fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java | 5 +- .../fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java | 5 +- ...ourceDaoDstu3ReferentialIntegrityTest.java | 39 +- ...eDaoDstu3SearchWithLuceneDisabledTest.java | 5 +- .../jpa/dao/expunge/PartitionRunnerTest.java | 4 +- .../ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java | 5 +- .../jpa/dao/r4/ConsentEventsDaoR4Test.java | 21 +- .../r4/FhirResourceDaoR4CacheWarmingTest.java | 2 +- ...urceDaoR4SearchWithLuceneDisabledTest.java | 6 +- ...hirResourceDaoR4UniqueSearchParamTest.java | 3 +- .../dao/r4/SearchParamExtractorR4Test.java | 5 + .../ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java | 5 +- ...sentInterceptorResourceProviderR4Test.java | 4 +- ...oviderOnlySomeResourcesProvidedR4Test.java | 12 +- .../provider/r4/ResourceProviderR4Test.java | 200 ++++--- .../r4/StaleSearchDeletingSvcR4Test.java | 38 +- .../jpa/sched/SchedulerServiceImplTest.java | 207 +++++++ .../subscription/SubscriptionTestUtil.java | 3 - .../email/EmailSubscriptionDstu2Test.java | 1 + .../InMemorySubscriptionMatcherR4Test.java | 13 + .../SubscriptionTriggeringDstu3Test.java | 10 +- ...eTest.java => ResourceCountCacheTest.java} | 39 +- .../tasks/HapiFhirJpaMigrationTasks.java | 10 + hapi-fhir-jpaserver-model/pom.xml | 5 + .../fhir/jpa/model/entity/ResourceTable.java | 4 +- .../jpa/model/sched/FireAtIntervalJob.java | 45 ++ .../jpa/model/sched/ISchedulerService.java | 21 + 
.../model/sched/ScheduledJobDefinition.java | 51 ++ .../uhn/fhir/jpa/model/util/JpaConstants.java | 33 ++ .../jpa/model/entity/ResourceTableTest.java | 20 + hapi-fhir-jpaserver-searchparam/pom.xml | 7 +- .../jpa/searchparam/SearchParameterMap.java | 9 + .../registry/BaseSearchParamRegistry.java | 29 +- .../registry/ISearchParamRegistry.java | 2 + .../fhir/jpa/searchparam/retry/Retrier.java | 17 +- .../SearchParamExtractorDstu3Test.java | 13 +- .../module/cache/SubscriptionConstants.java | 2 +- .../SubscriptionDeliveryHandlerFactory.java | 1 + .../module/cache/SubscriptionLoader.java | 51 +- .../config/TestSubscriptionDstu3Config.java | 6 + .../ca/uhn/fhirtest/TestRestfulServer.java | 6 + .../interceptor/AnalyticsInterceptor.java | 112 ++-- .../fhir/rest/api/server/IRestfulServer.java | 4 +- .../uhn/fhir/rest/server/RestfulServer.java | 14 +- .../fhir/rest/server/RestfulServerUtils.java | 78 +-- .../BaseOutcomeReturningMethodBinding.java | 8 +- .../fhir/rest/server/method/MethodUtil.java | 5 +- .../server/method/OperationParameter.java | 64 +- .../rest/server/RestfulServerUtilsTest.java | 30 + .../rest/client/GenericClientDstu2_1Test.java | 13 +- .../rest/client/GenericClientDstu2Test.java | 8 +- .../rest/client/GenericClientDstu3Test.java | 12 +- .../fhir/rest/client/GenericClientR4Test.java | 12 +- .../ca/uhn/fhir/rest/server/CreateR4Test.java | 6 +- .../ca/uhn/fhir/rest/server/PreferTest.java | 18 +- .../karaf/client/FhirClientTest.java | 4 +- pom.xml | 5 + src/changes/changes.xml | 11 + 129 files changed, 4152 insertions(+), 648 deletions(-) create mode 100644 hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferHeader.java delete mode 100644 hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferReturnEnum.java create mode 100644 hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ArrayUtil.java create mode 100644 hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ResourceEntity.java create mode 100644 
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProvider.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImpl.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkExportResponseJson.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkJobStatusEnum.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/IBulkDataExportSvc.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionDao.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionFileDao.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionEntity.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionFileEntity.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/AutowiringSpringBeanJobFactory.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/QuartzTableSeeder.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/SchedulerServiceImpl.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/WarmSearchDefinition.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JsonUtil.java delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SingleItemLoadingCache.java create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java create mode 100644 
hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/sched/SchedulerServiceImplTest.java rename hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/{SingleItemLoadingCacheTest.java => ResourceCountCacheTest.java} (55%) create mode 100644 hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/FireAtIntervalJob.java create mode 100644 hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/ISchedulerService.java create mode 100644 hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/ScheduledJobDefinition.java create mode 100644 hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceTableTest.java create mode 100644 hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/RestfulServerUtilsTest.java rename {hapi-fhir-structures-dstu2 => hapi-fhir-structures-r4}/src/test/java/ca/uhn/fhir/rest/server/PreferTest.java (95%) diff --git a/hapi-fhir-android/src/test/java/ca/uhn/fhir/android/client/GenericClientDstu3IT.java b/hapi-fhir-android/src/test/java/ca/uhn/fhir/android/client/GenericClientDstu3IT.java index 791dc58b9fa..e11b26a920a 100644 --- a/hapi-fhir-android/src/test/java/ca/uhn/fhir/android/client/GenericClientDstu3IT.java +++ b/hapi-fhir-android/src/test/java/ca/uhn/fhir/android/client/GenericClientDstu3IT.java @@ -8,6 +8,7 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Date; +import ca.uhn.fhir.rest.api.PreferHeader; import org.hl7.fhir.dstu3.model.*; import org.junit.*; import org.mockito.ArgumentCaptor; @@ -20,7 +21,6 @@ import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.MethodOutcome; -import ca.uhn.fhir.rest.api.PreferReturnEnum; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; 
import ca.uhn.fhir.rest.client.exceptions.FhirClientConnectionException; @@ -246,7 +246,7 @@ public class GenericClientDstu3IT { Patient pt = new Patient(); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.create().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.create().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertNull(outcome.getOperationOutcome()); assertNotNull(outcome.getResource()); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java index e82661f5ce4..4fe703209db 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java @@ -220,17 +220,31 @@ public class Constants { public static final String CASCADE_DELETE = "delete"; public static final int MAX_RESOURCE_NAME_LENGTH = 100; public static final String CACHE_CONTROL_PRIVATE = "private"; + public static final String CT_FHIR_NDJSON = "application/fhir+ndjson"; + public static final String CT_APP_NDJSON = "application/ndjson"; + public static final String CT_NDJSON = "ndjson"; + public static final Set CTS_NDJSON; + public static final String HEADER_PREFER_RESPOND_ASYNC = "respond-async"; public static final int STATUS_HTTP_412_PAYLOAD_TOO_LARGE = 413; public static final String OPERATION_NAME_GRAPHQL = "$graphql"; /** * Note that this constant is used in a number of places including DB column lengths! Be careful if you decide to change it. 
*/ public static final int REQUEST_ID_LENGTH = 16; + public static final int STATUS_HTTP_202_ACCEPTED = 202; + public static final String HEADER_X_PROGRESS = "X-Progress"; + public static final String HEADER_RETRY_AFTER = "Retry-After"; static { CHARSET_UTF8 = StandardCharsets.UTF_8; CHARSET_US_ASCII = StandardCharsets.ISO_8859_1; + HashSet ctsNdjson = new HashSet<>(); + ctsNdjson.add(CT_FHIR_NDJSON); + ctsNdjson.add(CT_APP_NDJSON); + ctsNdjson.add(CT_NDJSON); + CTS_NDJSON = Collections.unmodifiableSet(ctsNdjson); + HashMap statusNames = new HashMap<>(); statusNames.put(200, "OK"); statusNames.put(201, "Created"); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferHeader.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferHeader.java new file mode 100644 index 00000000000..9a0459e6ba5 --- /dev/null +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferHeader.java @@ -0,0 +1,61 @@ +package ca.uhn.fhir.rest.api; + +import javax.annotation.Nullable; +import java.util.HashMap; + +public class PreferHeader { + + private PreferReturnEnum myReturn; + private boolean myRespondAsync; + + public @Nullable + PreferReturnEnum getReturn() { + return myReturn; + } + + public PreferHeader setReturn(PreferReturnEnum theReturn) { + myReturn = theReturn; + return this; + } + + public boolean getRespondAsync() { + return myRespondAsync; + } + + public PreferHeader setRespondAsync(boolean theRespondAsync) { + myRespondAsync = theRespondAsync; + return this; + } + + /** + * Represents values for "return" value as provided in the the HTTP Prefer header. 
+ */ + public enum PreferReturnEnum { + + REPRESENTATION("representation"), MINIMAL("minimal"), OPERATION_OUTCOME("OperationOutcome"); + + private static HashMap ourValues; + private String myHeaderValue; + + PreferReturnEnum(String theHeaderValue) { + myHeaderValue = theHeaderValue; + } + + public String getHeaderValue() { + return myHeaderValue; + } + + public static PreferReturnEnum fromHeaderValue(String theHeaderValue) { + if (ourValues == null) { + HashMap values = new HashMap<>(); + for (PreferReturnEnum next : PreferReturnEnum.values()) { + values.put(next.getHeaderValue(), next); + } + ourValues = values; + } + return ourValues.get(theHeaderValue); + } + + } + +} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferReturnEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferReturnEnum.java deleted file mode 100644 index a3bbaeee9f6..00000000000 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferReturnEnum.java +++ /dev/null @@ -1,54 +0,0 @@ -package ca.uhn.fhir.rest.api; - -/* - * #%L - * HAPI FHIR - Core Library - * %% - * Copyright (C) 2014 - 2019 University Health Network - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.HashMap; - -/** - * Represents values for "return" value as provided in the the HTTP Prefer header. 
- */ -public enum PreferReturnEnum { - - REPRESENTATION("representation"), MINIMAL("minimal"), OPERATION_OUTCOME("OperationOutcome"); - - private String myHeaderValue; - private static HashMap ourValues; - - private PreferReturnEnum(String theHeaderValue) { - myHeaderValue = theHeaderValue; - } - - public static PreferReturnEnum fromHeaderValue(String theHeaderValue) { - if (ourValues == null) { - HashMap values = new HashMap(); - for (PreferReturnEnum next : PreferReturnEnum.values()) { - values.put(next.getHeaderValue(), next); - } - ourValues = values; - } - return ourValues.get(theHeaderValue); - } - - public String getHeaderValue() { - return myHeaderValue; - } - -} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateTyped.java index ea53ca9e005..a126d94e912 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateTyped.java @@ -21,7 +21,7 @@ package ca.uhn.fhir.rest.gclient; */ import ca.uhn.fhir.rest.api.MethodOutcome; -import ca.uhn.fhir.rest.api.PreferReturnEnum; +import ca.uhn.fhir.rest.api.PreferHeader; public interface ICreateTyped extends IClientExecutable { @@ -47,6 +47,6 @@ public interface ICreateTyped extends IClientExecutable{ @@ -32,6 +32,6 @@ public interface IPatchExecutable extends IClientExecutable{ @@ -32,6 +32,6 @@ public interface IUpdateExecutable extends IClientExecutable commaSeparatedListToCleanSet(String theValueAsString) { + Set resourceTypes; + resourceTypes = Arrays.stream(split(theValueAsString, ",")) + .map(t->trim(t)) + .filter(t->isNotBlank(t)) + .collect(Collectors.toSet()); + return resourceTypes; + } +} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java index 419fce6fea6..a3fc0c475d5 100644 --- 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java @@ -63,6 +63,9 @@ public class StopWatch { myStarted = theStart.getTime(); } + public StopWatch(long theL) { + } + private void addNewlineIfContentExists(StringBuilder theB) { if (theB.length() > 0) { theB.append("\n"); @@ -231,7 +234,12 @@ public class StopWatch { double denominator = ((double) millisElapsed) / ((double) periodMillis); - return (double) theNumOperations / denominator; + double throughput = (double) theNumOperations / denominator; + if (throughput > theNumOperations) { + throughput = theNumOperations; + } + + return throughput; } public void restart() { diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml index 5715d52ddca..c360b7279ee 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml @@ -5,9 +5,9 @@ if you are using this file as a basis for your own project. 
--> ca.uhn.hapi.fhir - hapi-fhir-cli + hapi-deployable-pom 4.1.0-SNAPSHOT - ../pom.xml + ../../hapi-deployable-pom hapi-fhir-cli-jpaserver @@ -131,6 +131,10 @@ Saxon-HE net.sf.saxon + + org.glassfish.jaxb + jaxb-core + @@ -183,15 +187,6 @@ - - - org.apache.maven.plugins - maven-deploy-plugin - - true - - - org.apache.maven.plugins diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java index a2ceab17980..9f928a55e1d 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java @@ -1,5 +1,25 @@ package ca.uhn.fhir.jpa.demo; +/*- + * #%L + * HAPI FHIR - Command Line Client - Server WAR + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory; diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/ContextHolder.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/ContextHolder.java index 0f051150151..ac49c36ca9d 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/ContextHolder.java +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/ContextHolder.java @@ -1,5 +1,25 @@ package ca.uhn.fhir.jpa.demo; +/*- + * #%L + * HAPI FHIR - Command Line Client - Server WAR + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.dao.DaoConfig; import org.apache.commons.cli.ParseException; diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java index 25b109aa52a..3c6e817faa2 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java @@ -1,5 +1,25 @@ package ca.uhn.fhir.jpa.demo; +/*- + * #%L + * HAPI FHIR - Command Line Client - Server WAR + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu2; import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorDstu2; diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfigDstu3.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfigDstu3.java index aa3071c6302..513b151d8c4 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfigDstu3.java +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfigDstu3.java @@ -1,5 +1,25 @@ package ca.uhn.fhir.jpa.demo; +/*- + * #%L + * HAPI FHIR - Command Line Client - Server WAR + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu3; import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.model.entity.ModelConfig; diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfigR4.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfigR4.java index e9f42c2f03b..a16f46e335c 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfigR4.java +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfigR4.java @@ -1,5 +1,25 @@ package ca.uhn.fhir.jpa.demo; +/*- + * #%L + * HAPI FHIR - Command Line Client - Server WAR + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import ca.uhn.fhir.jpa.config.BaseJavaConfigR4; import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.util.SubscriptionsRequireManualActivationInterceptorR4; diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirTesterConfig.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirTesterConfig.java index 87b599cdb2e..9d4016113a2 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirTesterConfig.java +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/FhirTesterConfig.java @@ -1,5 +1,25 @@ package ca.uhn.fhir.jpa.demo; +/*- + * #%L + * HAPI FHIR - Command Line Client - Server WAR + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/JpaServerDemo.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/JpaServerDemo.java index 96a42a220ff..2fce553f18b 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/JpaServerDemo.java +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/JpaServerDemo.java @@ -1,5 +1,25 @@ package ca.uhn.fhir.jpa.demo; +/*- + * #%L + * HAPI FHIR - Command Line Client - Server WAR + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; diff --git a/hapi-fhir-client-okhttp/src/test/java/ca/uhn/fhir/okhttp/GenericOkHttpClientDstu2Test.java b/hapi-fhir-client-okhttp/src/test/java/ca/uhn/fhir/okhttp/GenericOkHttpClientDstu2Test.java index 1c460b63a1a..54561e7527d 100644 --- a/hapi-fhir-client-okhttp/src/test/java/ca/uhn/fhir/okhttp/GenericOkHttpClientDstu2Test.java +++ b/hapi-fhir-client-okhttp/src/test/java/ca/uhn/fhir/okhttp/GenericOkHttpClientDstu2Test.java @@ -364,11 +364,11 @@ public class GenericOkHttpClientDstu2Test { Patient p = new Patient(); p.addName().addFamily("FOOFAMILY"); - client.create().resource(p).prefer(PreferReturnEnum.MINIMAL).execute(); + client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); - client.create().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute(); + client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); } @@ -1735,11 +1735,11 @@ public class GenericOkHttpClientDstu2Test { p.setId(new IdDt("1")); p.addName().addFamily("FOOFAMILY"); - client.update().resource(p).prefer(PreferReturnEnum.MINIMAL).execute(); + client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, 
ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); - client.update().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute(); + client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ResourceEntity.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ResourceEntity.java new file mode 100644 index 00000000000..31b6803cbae --- /dev/null +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ResourceEntity.java @@ -0,0 +1,20 @@ +package ca.uhn.fhir.rest.client.apache; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.rest.api.Constants; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.nio.charset.UnsupportedCharsetException; + +/** + * Apache HttpClient request content entity where the body is a FHIR resource, that will + * be encoded as JSON by default + */ +public class ResourceEntity extends StringEntity { + + public ResourceEntity(FhirContext theContext, IBaseResource theResource) throws UnsupportedCharsetException { + super(theContext.newJsonParser().encodeResourceToString(theResource), ContentType.parse(Constants.CT_FHIR_JSON_NEW)); + } +} diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java index 7838055f2c5..bb3a22a09f6 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java @@ -533,7 +533,7 @@ public class 
GenericClient extends BaseClient implements IGenericClient { private class CreateInternal extends BaseSearch implements ICreate, ICreateTyped, ICreateWithQuery, ICreateWithQueryTyped { private boolean myConditional; - private PreferReturnEnum myPrefer; + private PreferHeader.PreferReturnEnum myPrefer; private IBaseResource myResource; private String myResourceBody; private String mySearchUrl; @@ -580,7 +580,7 @@ public class GenericClient extends BaseClient implements IGenericClient { } @Override - public ICreateTyped prefer(PreferReturnEnum theReturn) { + public ICreateTyped prefer(PreferHeader.PreferReturnEnum theReturn) { myPrefer = theReturn; return this; } @@ -1380,13 +1380,13 @@ public class GenericClient extends BaseClient implements IGenericClient { } private final class OutcomeResponseHandler implements IClientResponseHandler { - private PreferReturnEnum myPrefer; + private PreferHeader.PreferReturnEnum myPrefer; private OutcomeResponseHandler() { super(); } - private OutcomeResponseHandler(PreferReturnEnum thePrefer) { + private OutcomeResponseHandler(PreferHeader.PreferReturnEnum thePrefer) { this(); myPrefer = thePrefer; } @@ -1396,7 +1396,7 @@ public class GenericClient extends BaseClient implements IGenericClient { MethodOutcome response = MethodUtil.process2xxResponse(myContext, theResponseStatusCode, theResponseMimeType, theResponseInputStream, theHeaders); response.setCreatedUsingStatusCode(theResponseStatusCode); - if (myPrefer == PreferReturnEnum.REPRESENTATION) { + if (myPrefer == PreferHeader.PreferReturnEnum.REPRESENTATION) { if (response.getResource() == null) { if (response.getId() != null && isNotBlank(response.getId().getValue()) && response.getId().hasBaseUrl()) { ourLog.info("Server did not return resource for Prefer-representation, going to fetch: {}", response.getId().getValue()); @@ -1418,7 +1418,7 @@ public class GenericClient extends BaseClient implements IGenericClient { private IIdType myId; private String myPatchBody; private 
PatchTypeEnum myPatchType; - private PreferReturnEnum myPrefer; + private PreferHeader.PreferReturnEnum myPrefer; private String myResourceType; private String mySearchUrl; @@ -1476,7 +1476,7 @@ public class GenericClient extends BaseClient implements IGenericClient { } @Override - public IPatchExecutable prefer(PreferReturnEnum theReturn) { + public IPatchExecutable prefer(PreferHeader.PreferReturnEnum theReturn) { myPrefer = theReturn; return this; } @@ -2048,7 +2048,7 @@ public class GenericClient extends BaseClient implements IGenericClient { private boolean myConditional; private IIdType myId; - private PreferReturnEnum myPrefer; + private PreferHeader.PreferReturnEnum myPrefer; private IBaseResource myResource; private String myResourceBody; private String mySearchUrl; @@ -2102,7 +2102,7 @@ public class GenericClient extends BaseClient implements IGenericClient { } @Override - public IUpdateExecutable prefer(PreferReturnEnum theReturn) { + public IUpdateExecutable prefer(PreferHeader.PreferReturnEnum theReturn) { myPrefer = theReturn; return this; } @@ -2282,7 +2282,7 @@ public class GenericClient extends BaseClient implements IGenericClient { params.get(parameterName).add(parameterValue); } - private static void addPreferHeader(PreferReturnEnum thePrefer, BaseHttpClientInvocation theInvocation) { + private static void addPreferHeader(PreferHeader.PreferReturnEnum thePrefer, BaseHttpClientInvocation theInvocation) { if (thePrefer != null) { theInvocation.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RETURN + '=' + thePrefer.getHeaderValue()); } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsPageProvider.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsPageProvider.java index 131187d4197..64f0cc7d236 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsPageProvider.java +++ 
b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsPageProvider.java @@ -112,8 +112,8 @@ public abstract class AbstractJaxRsPageProvider extends AbstractJaxRsProvider im } @Override - public PreferReturnEnum getDefaultPreferReturn() { - return PreferReturnEnum.REPRESENTATION; + public PreferHeader.PreferReturnEnum getDefaultPreferReturn() { + return PreferHeader.PreferReturnEnum.REPRESENTATION; } } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProvider.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProvider.java index 3a8aac982a9..0c3d2a6544e 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProvider.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProvider.java @@ -28,7 +28,6 @@ import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import ca.uhn.fhir.interceptor.api.IInterceptorService; import org.hl7.fhir.instance.model.api.IBaseResource; import ca.uhn.fhir.context.FhirContext; @@ -371,8 +370,8 @@ implements IRestfulServer, IResourceProvider { } @Override - public PreferReturnEnum getDefaultPreferReturn() { - return PreferReturnEnum.REPRESENTATION; + public PreferHeader.PreferReturnEnum getDefaultPreferReturn() { + return PreferHeader.PreferReturnEnum.REPRESENTATION; } /** diff --git a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu2Test.java b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu2Test.java index a91c7d798e4..d2f3d7137ae 100644 --- a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu2Test.java +++ b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu2Test.java @@ -301,12 +301,12 @@ public class GenericJaxRsClientDstu2Test { Patient p = 
new Patient(); p.addName().addFamily("FOOFAMILY"); - client.create().resource(p).prefer(PreferReturnEnum.MINIMAL).execute(); + client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); - client.create().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute(); + client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); @@ -1927,12 +1927,12 @@ public class GenericJaxRsClientDstu2Test { p.setId(new IdDt("1")); p.addName().addFamily("FOOFAMILY"); - client.update().resource(p).prefer(PreferReturnEnum.MINIMAL).execute(); + client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); - client.update().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute(); + client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); diff --git a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu3Test.java 
b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu3Test.java index 2a6a2733ea9..2a704277757 100644 --- a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu3Test.java +++ b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu3Test.java @@ -321,12 +321,12 @@ public class GenericJaxRsClientDstu3Test { Patient p = new Patient(); p.addName().setFamily("FOOFAMILY"); - client.create().resource(p).prefer(PreferReturnEnum.MINIMAL).execute(); + client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); - client.create().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute(); + client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); @@ -1980,12 +1980,12 @@ public class GenericJaxRsClientDstu3Test { p.setId(new IdType("1")); p.addName().setFamily("FOOFAMILY"); - client.update().resource(p).prefer(PreferReturnEnum.MINIMAL).execute(); + client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute(); assertEquals(1, ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); - client.update().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute(); + client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, 
ourRequestHeaders.get(Constants.HEADER_PREFER).size()); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, ourRequestHeaders.get(Constants.HEADER_PREFER).get(0).getValue()); diff --git a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProviderDstu3Test.java b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProviderDstu3Test.java index 2749badea8b..345231b2427 100644 --- a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProviderDstu3Test.java +++ b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProviderDstu3Test.java @@ -7,10 +7,7 @@ import ca.uhn.fhir.jaxrs.server.test.TestJaxRsConformanceRestProviderDstu3; import ca.uhn.fhir.jaxrs.server.test.TestJaxRsMockPageProviderDstu3; import ca.uhn.fhir.jaxrs.server.test.TestJaxRsMockPatientRestProviderDstu3; import ca.uhn.fhir.model.primitive.UriDt; -import ca.uhn.fhir.rest.api.EncodingEnum; -import ca.uhn.fhir.rest.api.MethodOutcome; -import ca.uhn.fhir.rest.api.PreferReturnEnum; -import ca.uhn.fhir.rest.api.SearchStyleEnum; +import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; @@ -138,7 +135,7 @@ public class AbstractJaxRsResourceProviderDstu3Test { client.setEncoding(EncodingEnum.JSON); MethodOutcome response = client.create().resource(toCreate).conditional() - .where(Patient.IDENTIFIER.exactly().identifier("2")).prefer(PreferReturnEnum.REPRESENTATION).execute(); + .where(Patient.IDENTIFIER.exactly().identifier("2")).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals("myIdentifier", patientCaptor.getValue().getIdentifier().get(0).getValue()); IBaseResource resource = response.getResource(); @@ -161,7 +158,7 @@ public class 
AbstractJaxRsResourceProviderDstu3Test { when(mock.create(patientCaptor.capture(), isNull(String.class))).thenReturn(outcome); client.setEncoding(EncodingEnum.JSON); - final MethodOutcome response = client.create().resource(toCreate).prefer(PreferReturnEnum.REPRESENTATION) + final MethodOutcome response = client.create().resource(toCreate).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION) .execute(); IBaseResource resource = response.getResource(); compareResultId(1, resource); diff --git a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProviderTest.java b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProviderTest.java index 09a3f051dd0..75c5331038d 100644 --- a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProviderTest.java +++ b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProviderTest.java @@ -13,10 +13,7 @@ import ca.uhn.fhir.model.primitive.DateDt; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.model.primitive.UriDt; -import ca.uhn.fhir.rest.api.EncodingEnum; -import ca.uhn.fhir.rest.api.MethodOutcome; -import ca.uhn.fhir.rest.api.PreferReturnEnum; -import ca.uhn.fhir.rest.api.SearchStyleEnum; +import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; @@ -134,7 +131,7 @@ public class AbstractJaxRsResourceProviderTest { client.setEncoding(EncodingEnum.JSON); MethodOutcome response = client.create().resource(toCreate).conditional() - .where(Patient.IDENTIFIER.exactly().identifier("2")).prefer(PreferReturnEnum.REPRESENTATION).execute(); + .where(Patient.IDENTIFIER.exactly().identifier("2")).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals("myIdentifier", 
patientCaptor.getValue().getIdentifierFirstRep().getValue()); IResource resource = (IResource) response.getResource(); @@ -157,7 +154,7 @@ public class AbstractJaxRsResourceProviderTest { when(mock.create(patientCaptor.capture(), isNull(String.class))).thenReturn(outcome); client.setEncoding(EncodingEnum.JSON); - final MethodOutcome response = client.create().resource(toCreate).prefer(PreferReturnEnum.REPRESENTATION) + final MethodOutcome response = client.create().resource(toCreate).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION) .execute(); IResource resource = (IResource) response.getResource(); compareResultId(1, resource); diff --git a/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderDstu3Test.java b/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderDstu3Test.java index 9c3db5e1244..a475778725e 100644 --- a/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderDstu3Test.java +++ b/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderDstu3Test.java @@ -138,7 +138,7 @@ public class JaxRsPatientProviderDstu3Test { existing.setId((IdType) null); existing.getName().add(new HumanName().setFamily("Created Patient 54")); client.setEncoding(EncodingEnum.JSON); - final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); + final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); System.out.println(patient); @@ -154,7 +154,7 @@ public class JaxRsPatientProviderDstu3Test { existing.setId((IdType) null); existing.getName().add(new HumanName().setFamily("Created Patient 54")); client.setEncoding(EncodingEnum.XML); - final MethodOutcome results = 
client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); + final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); @@ -187,7 +187,7 @@ public class JaxRsPatientProviderDstu3Test { public void testDeletePatient() { final Patient existing = new Patient(); existing.getName().add(new HumanName().setFamily("Created Patient XYZ")); - final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); + final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); client.delete().resourceById(patient.getIdElement()).execute(); diff --git a/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderR4Test.java b/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderR4Test.java index 9fdae9b45b9..761cd388b19 100644 --- a/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderR4Test.java +++ b/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderR4Test.java @@ -7,7 +7,7 @@ import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.MethodOutcome; -import ca.uhn.fhir.rest.api.PreferReturnEnum; +import ca.uhn.fhir.rest.api.PreferHeader; import ca.uhn.fhir.rest.api.SearchStyleEnum; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; @@ -152,7 +152,7 @@ public class JaxRsPatientProviderR4Test { existing.setId((IdDt) null); 
existing.getNameFirstRep().setFamily("Created Patient 54"); client.setEncoding(EncodingEnum.JSON); - final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); + final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); System.out.println(patient); @@ -167,7 +167,7 @@ public class JaxRsPatientProviderR4Test { existing.setId((IdDt) null); existing.getNameFirstRep().setFamily("Created Patient 54"); client.setEncoding(EncodingEnum.XML); - final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); + final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); @@ -199,7 +199,7 @@ public class JaxRsPatientProviderR4Test { public void testDeletePatient() { final Patient existing = new Patient(); existing.getNameFirstRep().setFamily("Created Patient XYZ"); - final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); + final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); client.delete().resource(patient).execute(); diff --git a/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderTest.java b/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderTest.java index 215ba2bf300..71803f9756f 100644 --- a/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderTest.java +++ 
b/hapi-fhir-jaxrsserver-example/src/test/java/ca/uhn/fhir/jaxrs/server/example/JaxRsPatientProviderTest.java @@ -149,7 +149,7 @@ public class JaxRsPatientProviderTest { existing.setId((IdDt) null); existing.getNameFirstRep().addFamily("Created Patient 54"); client.setEncoding(EncodingEnum.JSON); - final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); + final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); System.out.println(patient); @@ -164,7 +164,7 @@ public class JaxRsPatientProviderTest { existing.setId((IdDt) null); existing.getNameFirstRep().addFamily("Created Patient 54"); client.setEncoding(EncodingEnum.XML); - final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); + final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); @@ -196,7 +196,7 @@ public class JaxRsPatientProviderTest { public void testDeletePatient() { final Patient existing = new Patient(); existing.getNameFirstRep().addFamily("Created Patient XYZ"); - final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); + final MethodOutcome results = client.create().resource(existing).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); client.delete().resourceById(patient.getId()).execute(); diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 951c0cbd193..11ab97ac90f 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ 
-309,6 +309,11 @@ provided + + org.quartz-scheduler + quartz + + @@ -700,7 +705,7 @@ ca.uhn.fhir.jpa.config ca.uhn.fhir.jpa.rp.dstu2 hapi-fhir-server-resourceproviders-dstu2.xml - + diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProvider.java new file mode 100644 index 00000000000..42052861acd --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProvider.java @@ -0,0 +1,158 @@ +package ca.uhn.fhir.jpa.bulk; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.jpa.util.JsonUtil; +import ca.uhn.fhir.rest.annotation.Operation; +import ca.uhn.fhir.rest.annotation.OperationParam; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.PreferHeader; +import ca.uhn.fhir.rest.server.RestfulServerUtils; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; +import ca.uhn.fhir.util.ArrayUtil; +import ca.uhn.fhir.util.OperationOutcomeUtil; +import com.google.common.annotations.VisibleForTesting; +import org.apache.commons.lang3.StringUtils; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; +import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.hl7.fhir.r4.model.InstantType; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.util.Date; +import java.util.Set; + +public class BulkDataExportProvider { + + @Autowired + private IBulkDataExportSvc myBulkDataExportSvc; + @Autowired + private FhirContext myFhirContext; + + @VisibleForTesting + public void setFhirContextForUnitTest(FhirContext theFhirContext) { + myFhirContext = theFhirContext; + } + + @VisibleForTesting + public void setBulkDataExportSvcForUnitTests(IBulkDataExportSvc 
theBulkDataExportSvc) { + myBulkDataExportSvc = theBulkDataExportSvc; + } + + /** + * $export + */ + @Operation(name = JpaConstants.OPERATION_EXPORT, global = false /* set to true once we can handle this */, manualResponse = true, idempotent = true) + public void export( + @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType theTypeFilter, + ServletRequestDetails theRequestDetails + ) { + + String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER); + PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader); + if (prefer.getRespondAsync() == false) { + throw new InvalidRequestException("Must request async processing for $export"); + } + + String outputFormat = theOutputFormat != null ? theOutputFormat.getValueAsString() : null; + + Set resourceTypes = null; + if (theType != null) { + resourceTypes = ArrayUtil.commaSeparatedListToCleanSet(theType.getValueAsString()); + } + + Date since = null; + if (theSince != null) { + since = theSince.getValue(); + } + + Set filters = null; + if (theTypeFilter != null) { + filters = ArrayUtil.commaSeparatedListToCleanSet(theTypeFilter.getValueAsString()); + } + + IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(outputFormat, resourceTypes, since, filters); + + String serverBase = getServerBase(theRequestDetails); + String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" 
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + outcome.getJobId(); + + HttpServletResponse response = theRequestDetails.getServletResponse(); + + // Add standard headers + theRequestDetails.getServer().addHeadersToResponse(response); + + // Successful 202 Accepted + response.addHeader(Constants.HEADER_CONTENT_LOCATION, pollLocation); + response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED); + } + + /** + * $export-poll-status + */ + @Operation(name = JpaConstants.OPERATION_EXPORT_POLL_STATUS, manualResponse = true, idempotent = true) + public void exportPollStatus( + @OperationParam(name = JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, typeName = "string", min = 0, max = 1) IPrimitiveType theJobId, + ServletRequestDetails theRequestDetails + ) throws IOException { + + HttpServletResponse response = theRequestDetails.getServletResponse(); + theRequestDetails.getServer().addHeadersToResponse(response); + + IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(theJobId.getValueAsString()); + + switch (status.getStatus()) { + case SUBMITTED: + case BUILDING: + + response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED); + response.addHeader(Constants.HEADER_X_PROGRESS, "Build in progress - Status set to " + status.getStatus() + " at " + new InstantType(status.getStatusTime()).getValueAsString()); + response.addHeader(Constants.HEADER_RETRY_AFTER, "120"); + break; + + case COMPLETE: + + response.setStatus(Constants.STATUS_HTTP_200_OK); + response.setContentType(Constants.CT_JSON); + + // Create a JSON response + BulkExportResponseJson bulkResponseDocument = new BulkExportResponseJson(); + bulkResponseDocument.setTransactionTime(status.getStatusTime()); + bulkResponseDocument.setRequest(status.getRequest()); + for (IBulkDataExportSvc.FileEntry nextFile : status.getFiles()) { + String serverBase = getServerBase(theRequestDetails); + String nextUrl = serverBase + "/" + nextFile.getResourceId().toUnqualifiedVersionless().getValue(); 
+ bulkResponseDocument + .addOutput() + .setType(nextFile.getResourceType()) + .setUrl(nextUrl); + } + JsonUtil.serialize(bulkResponseDocument, response.getWriter()); + response.getWriter().close(); + break; + + case ERROR: + + response.setStatus(Constants.STATUS_HTTP_500_INTERNAL_ERROR); + response.setContentType(Constants.CT_FHIR_JSON); + + // Create an OperationOutcome response + IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(myFhirContext); + OperationOutcomeUtil.addIssue(myFhirContext, oo, "error", status.getStatusMessage(), null, null); + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(oo, response.getWriter()); + response.getWriter().close(); + + } + + + } + + private String getServerBase(ServletRequestDetails theRequestDetails) { + return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/"); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImpl.java new file mode 100644 index 00000000000..e6c7c77371a --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImpl.java @@ -0,0 +1,447 @@ +package ca.uhn.fhir.jpa.bulk; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.dao.DaoRegistry; +import ca.uhn.fhir.jpa.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.dao.IResultIterator; +import ca.uhn.fhir.jpa.dao.ISearchBuilder; +import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao; +import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao; +import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; +import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity; +import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; +import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; +import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import 
ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; +import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; +import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.jpa.util.ExpungeOptions; +import ca.uhn.fhir.parser.IParser; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.param.DateRangeParam; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.util.BinaryUtil; +import ca.uhn.fhir.util.StopWatch; +import com.google.common.collect.Sets; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.time.DateUtils; +import org.hl7.fhir.instance.model.api.IBaseBinary; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r4.model.InstantType; +import org.quartz.DisallowConcurrentExecution; +import org.quartz.JobExecutionContext; +import org.quartz.PersistJobDataAfterExecution; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionTemplate; + +import javax.annotation.PostConstruct; +import javax.transaction.Transactional; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.util.*; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; + +import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +public class BulkDataExportSvcImpl 
implements IBulkDataExportSvc { + + private static final long REFRESH_INTERVAL = 10 * DateUtils.MILLIS_PER_SECOND; + private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImpl.class); + private int myReuseBulkExportForMillis = (int) (60 * DateUtils.MILLIS_PER_MINUTE); + + @Autowired + private IBulkExportJobDao myBulkExportJobDao; + @Autowired + private IBulkExportCollectionDao myBulkExportCollectionDao; + @Autowired + private IBulkExportCollectionFileDao myBulkExportCollectionFileDao; + @Autowired + private ISchedulerService mySchedulerService; + @Autowired + private DaoRegistry myDaoRegistry; + @Autowired + private FhirContext myContext; + @Autowired + private PlatformTransactionManager myTxManager; + private TransactionTemplate myTxTemplate; + + private long myFileMaxChars = 500 * FileUtils.ONE_KB; + private int myRetentionPeriod = (int) DateUtils.MILLIS_PER_DAY; + + /** + * This method is called by the scheduler to run a pass of the + * generator + */ + @Transactional(value = Transactional.TxType.NEVER) + @Override + public synchronized void buildExportFiles() { + + Optional jobToProcessOpt = myTxTemplate.execute(t -> { + Pageable page = PageRequest.of(0, 1); + Slice submittedJobs = myBulkExportJobDao.findByStatus(page, BulkJobStatusEnum.SUBMITTED); + if (submittedJobs.isEmpty()) { + return Optional.empty(); + } + return Optional.of(submittedJobs.getContent().get(0)); + }); + + if (!jobToProcessOpt.isPresent()) { + return; + } + + String jobUuid = jobToProcessOpt.get().getJobId(); + + try { + myTxTemplate.execute(t -> { + processJob(jobUuid); + return null; + }); + } catch (Exception e) { + ourLog.error("Failure while preparing bulk export extract", e); + myTxTemplate.execute(t -> { + Optional submittedJobs = myBulkExportJobDao.findByJobId(jobUuid); + if (submittedJobs.isPresent()) { + BulkExportJobEntity jobEntity = submittedJobs.get(); + jobEntity.setStatus(BulkJobStatusEnum.ERROR); + jobEntity.setStatusMessage(e.getMessage()); + 
myBulkExportJobDao.save(jobEntity); + } + return null; + }); + } + + } + + + /** + * This method is called by the scheduler to run a pass of the + * generator + */ + @Transactional(value = Transactional.TxType.NEVER) + @Override + public void purgeExpiredFiles() { + Optional jobToDelete = myTxTemplate.execute(t -> { + Pageable page = PageRequest.of(0, 1); + Slice submittedJobs = myBulkExportJobDao.findByExpiry(page, new Date()); + if (submittedJobs.isEmpty()) { + return Optional.empty(); + } + return Optional.of(submittedJobs.getContent().get(0)); + }); + + if (jobToDelete.isPresent()) { + + ourLog.info("Deleting bulk export job: {}", jobToDelete.get().getJobId()); + + myTxTemplate.execute(t -> { + + BulkExportJobEntity job = myBulkExportJobDao.getOne(jobToDelete.get().getId()); + for (BulkExportCollectionEntity nextCollection : job.getCollections()) { + for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) { + + ourLog.info("Purging bulk data file: {}", nextFile.getResourceId()); + getBinaryDao().delete(toId(nextFile.getResourceId())); + getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), null); + myBulkExportCollectionFileDao.delete(nextFile); + + } + + myBulkExportCollectionDao.delete(nextCollection); + } + + myBulkExportJobDao.delete(job); + return null; + }); + + } + + } + + private void processJob(String theJobUuid) { + + Optional jobOpt = myBulkExportJobDao.findByJobId(theJobUuid); + if (!jobOpt.isPresent()) { + ourLog.info("Job appears to be deleted"); + return; + } + + StopWatch jobStopwatch = new StopWatch(); + AtomicInteger jobResourceCounter = new AtomicInteger(); + + BulkExportJobEntity job = jobOpt.get(); + ourLog.info("Bulk export starting generation for batch export job: {}", job); + + for (BulkExportCollectionEntity nextCollection : job.getCollections()) { + + String nextType = nextCollection.getResourceType(); + 
IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextType); + + ourLog.info("Bulk export assembling export of type {} for job {}", nextType, theJobUuid); + + ISearchBuilder sb = dao.newSearchBuilder(); + Class nextTypeClass = myContext.getResourceDefinition(nextType).getImplementingClass(); + sb.setType(nextTypeClass, nextType); + + SearchParameterMap map = new SearchParameterMap(); + map.setLoadSynchronous(true); + if (job.getSince() != null) { + map.setLastUpdated(new DateRangeParam(job.getSince(), null)); + } + + IResultIterator resultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, theJobUuid), null); + storeResultsToFiles(nextCollection, sb, resultIterator, jobResourceCounter, jobStopwatch); + + + } + + job.setStatus(BulkJobStatusEnum.COMPLETE); + updateExpiry(job); + myBulkExportJobDao.save(job); + + ourLog.info("Bulk export completed job in {}: {}", jobStopwatch, job); + + } + + private void storeResultsToFiles(BulkExportCollectionEntity theExportCollection, ISearchBuilder theSearchBuilder, IResultIterator theResultIterator, AtomicInteger theJobResourceCounter, StopWatch theJobStopwatch) { + + try (IResultIterator query = theResultIterator) { + if (!query.hasNext()) { + return; + } + + AtomicInteger fileCounter = new AtomicInteger(0); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + OutputStreamWriter writer = new OutputStreamWriter(outputStream, Constants.CHARSET_UTF8); + IParser parser = myContext.newJsonParser().setPrettyPrint(false); + + List pidsSpool = new ArrayList<>(); + List resourcesSpool = new ArrayList<>(); + while (query.hasNext()) { + pidsSpool.add(query.next()); + fileCounter.incrementAndGet(); + theJobResourceCounter.incrementAndGet(); + + if (pidsSpool.size() >= 10 || !query.hasNext()) { + + theSearchBuilder.loadResourcesByPid(pidsSpool, Collections.emptyList(), resourcesSpool, false, null); + + for (IBaseResource nextFileResource : resourcesSpool) { + parser.encodeResourceToWriter(nextFileResource, 
writer); + writer.append("\n"); + } + + pidsSpool.clear(); + resourcesSpool.clear(); + + if (outputStream.size() >= myFileMaxChars || !query.hasNext()) { + Optional createdId = flushToFiles(theExportCollection, fileCounter, outputStream); + createdId.ifPresent(theIIdType -> ourLog.info("Created resource {} for bulk export file containing {} resources of type {} - Total {} resources ({}/sec)", theIIdType.toUnqualifiedVersionless().getValue(), fileCounter.get(), theExportCollection.getResourceType(), theJobResourceCounter.get(), theJobStopwatch.formatThroughput(theJobResourceCounter.get(), TimeUnit.SECONDS))); + fileCounter.set(0); + } + + } + } + + } catch (IOException e) { + throw new InternalErrorException(e); + } + } + + private Optional flushToFiles(BulkExportCollectionEntity theCollection, AtomicInteger theCounter, ByteArrayOutputStream theOutputStream) { + if (theOutputStream.size() > 0) { + IBaseBinary binary = BinaryUtil.newBinary(myContext); + binary.setContentType(Constants.CT_FHIR_NDJSON); + binary.setContent(theOutputStream.toByteArray()); + + IIdType createdId = getBinaryDao().create(binary).getResource().getIdElement(); + + BulkExportCollectionFileEntity file = new BulkExportCollectionFileEntity(); + theCollection.getFiles().add(file); + file.setCollection(theCollection); + file.setResource(createdId.getIdPart()); + myBulkExportCollectionFileDao.saveAndFlush(file); + theOutputStream.reset(); + + return Optional.of(createdId); + } + + return Optional.empty(); + } + + @SuppressWarnings("unchecked") + private IFhirResourceDao getBinaryDao() { + return myDaoRegistry.getResourceDao("Binary"); + } + + @PostConstruct + public void start() { + ourLog.info("Bulk export service starting with refresh interval {}", StopWatch.formatMillis(REFRESH_INTERVAL)); + myTxTemplate = new TransactionTemplate(myTxManager); + + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(BulkDataExportSvcImpl.class.getName()); + 
jobDetail.setJobClass(BulkDataExportSvcImpl.SubmitJob.class); + mySchedulerService.scheduleFixedDelay(REFRESH_INTERVAL, true, jobDetail); + } + + @Transactional + @Override + public JobInfo submitJob(String theOutputFormat, Set theResourceTypes, Date theSince, Set theFilters) { + String outputFormat = Constants.CT_FHIR_NDJSON; + if (isNotBlank(theOutputFormat)) { + outputFormat = theOutputFormat; + } + if (!Constants.CTS_NDJSON.contains(outputFormat)) { + throw new InvalidRequestException("Invalid output format: " + theOutputFormat); + } + + StringBuilder requestBuilder = new StringBuilder(); + requestBuilder.append("/").append(JpaConstants.OPERATION_EXPORT); + requestBuilder.append("?").append(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT).append("=").append(escapeUrlParam(outputFormat)); + Set resourceTypes = theResourceTypes; + if (resourceTypes != null) { + requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE).append("=").append(String.join(",", resourceTypes)); + } + Date since = theSince; + if (since != null) { + requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_SINCE).append("=").append(new InstantType(since).setTimeZoneZulu(true).getValueAsString()); + } + if (theFilters != null && theFilters.size() > 0) { + requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE).append("=").append(String.join(",", theFilters)); + } + String request = requestBuilder.toString(); + + Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis); + Pageable page = PageRequest.of(0, 10); + Slice existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR); + if (existing.isEmpty() == false) { + return toSubmittedJobInfo(existing.iterator().next()); + } + + if (theResourceTypes == null || resourceTypes.isEmpty()) { + // This is probably not a useful default, but having the default be "download the whole + // server" seems like a risky default too. 
We'll deal with that by having the default involve + // only returning a small time span + resourceTypes = myContext.getResourceNames(); + if (since == null) { + since = DateUtils.addDays(new Date(), -1); + } + } + + BulkExportJobEntity job = new BulkExportJobEntity(); + job.setJobId(UUID.randomUUID().toString()); + job.setStatus(BulkJobStatusEnum.SUBMITTED); + job.setSince(since); + job.setCreated(new Date()); + job.setRequest(request); + + updateExpiry(job); + myBulkExportJobDao.save(job); + + for (String nextType : resourceTypes) { + if (!myDaoRegistry.isResourceTypeSupported(nextType)) { + throw new InvalidRequestException("Unknown or unsupported resource type: " + nextType); + } + + BulkExportCollectionEntity collection = new BulkExportCollectionEntity(); + collection.setJob(job); + collection.setResourceType(nextType); + job.getCollections().add(collection); + myBulkExportCollectionDao.save(collection); + } + + ourLog.info("Bulk export job submitted: {}", job.toString()); + + return toSubmittedJobInfo(job); + } + + private JobInfo toSubmittedJobInfo(BulkExportJobEntity theJob) { + return new JobInfo().setJobId(theJob.getJobId()); + } + + + private void updateExpiry(BulkExportJobEntity theJob) { + theJob.setExpiry(DateUtils.addMilliseconds(new Date(), myRetentionPeriod)); + } + + @Transactional + @Override + public JobInfo getJobStatusOrThrowResourceNotFound(String theJobId) { + BulkExportJobEntity job = myBulkExportJobDao + .findByJobId(theJobId) + .orElseThrow(() -> new ResourceNotFoundException(theJobId)); + + JobInfo retVal = new JobInfo(); + retVal.setJobId(theJobId); + retVal.setStatus(job.getStatus()); + retVal.setStatus(job.getStatus()); + retVal.setStatusTime(job.getStatusTime()); + retVal.setStatusMessage(job.getStatusMessage()); + retVal.setRequest(job.getRequest()); + + if (job.getStatus() == BulkJobStatusEnum.COMPLETE) { + for (BulkExportCollectionEntity nextCollection : job.getCollections()) { + for (BulkExportCollectionFileEntity nextFile : 
nextCollection.getFiles()) { + retVal.addFile() + .setResourceType(nextCollection.getResourceType()) + .setResourceId(toQualifiedBinaryId(nextFile.getResourceId())); + } + } + } + + return retVal; + } + + private IIdType toId(String theResourceId) { + IIdType retVal = myContext.getVersion().newIdType(); + retVal.setValue(theResourceId); + return retVal; + } + + private IIdType toQualifiedBinaryId(String theIdPart) { + IIdType retVal = myContext.getVersion().newIdType(); + retVal.setParts(null, "Binary", theIdPart, null); + + return retVal; + } + + @Override + @Transactional + public synchronized void cancelAndPurgeAllJobs() { + myBulkExportCollectionFileDao.deleteAll(); + myBulkExportCollectionDao.deleteAll(); + myBulkExportJobDao.deleteAll(); + } + + @DisallowConcurrentExecution + @PersistJobDataAfterExecution + public static class SubmitJob extends FireAtIntervalJob { + @Autowired + private IBulkDataExportSvc myTarget; + + public SubmitJob() { + super(REFRESH_INTERVAL); + } + + @Override + protected void doExecute(JobExecutionContext theContext) { + myTarget.buildExportFiles(); + } + } + + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkExportResponseJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkExportResponseJson.java new file mode 100644 index 00000000000..5eb968a0e84 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkExportResponseJson.java @@ -0,0 +1,110 @@ +package ca.uhn.fhir.jpa.bulk; + + +import ca.uhn.fhir.jpa.util.JsonDateDeserializer; +import ca.uhn.fhir.jpa.util.JsonDateSerializer; +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + 
+@JsonInclude(JsonInclude.Include.NON_DEFAULT) +@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +public class BulkExportResponseJson { + + @JsonProperty("transactionTime") + @JsonSerialize(using = JsonDateSerializer.class) + @JsonDeserialize(using = JsonDateDeserializer.class) + private Date myTransactionTime; + + @JsonProperty("request") + private String myRequest; + @JsonProperty("requiresAccessToken") + private Boolean myRequiresAccessToken; + @JsonProperty("output") + private List myOutput; + @JsonProperty("error") + private List myError; + + public Date getTransactionTime() { + return myTransactionTime; + } + + public BulkExportResponseJson setTransactionTime(Date theTransactionTime) { + myTransactionTime = theTransactionTime; + return this; + } + + public String getRequest() { + return myRequest; + } + + public BulkExportResponseJson setRequest(String theRequest) { + myRequest = theRequest; + return this; + } + + public Boolean getRequiresAccessToken() { + return myRequiresAccessToken; + } + + public BulkExportResponseJson setRequiresAccessToken(Boolean theRequiresAccessToken) { + myRequiresAccessToken = theRequiresAccessToken; + return this; + } + + public List getOutput() { + if (myOutput == null) { + myOutput = new ArrayList<>(); + } + return myOutput; + } + + public List getError() { + if (myError == null) { + myError = new ArrayList<>(); + } + return myError; + } + + public Output addOutput() { + Output retVal = new Output(); + getOutput().add(retVal); + return retVal; + } + + @JsonInclude(JsonInclude.Include.NON_NULL) + @JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = 
JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) + public static class Output { + + @JsonProperty("type") + private String myType; + @JsonProperty("url") + private String myUrl; + + public String getType() { + return myType; + } + + public Output setType(String theType) { + myType = theType; + return this; + } + + public String getUrl() { + return myUrl; + } + + public Output setUrl(String theUrl) { + myUrl = theUrl; + return this; + } + + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkJobStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkJobStatusEnum.java new file mode 100644 index 00000000000..9937e1bd274 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/BulkJobStatusEnum.java @@ -0,0 +1,10 @@ +package ca.uhn.fhir.jpa.bulk; + +public enum BulkJobStatusEnum { + + SUBMITTED, + BUILDING, + COMPLETE, + ERROR + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/IBulkDataExportSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/IBulkDataExportSvc.java new file mode 100644 index 00000000000..b2a8f73a232 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/IBulkDataExportSvc.java @@ -0,0 +1,114 @@ +package ca.uhn.fhir.jpa.bulk; + +import org.hl7.fhir.instance.model.api.IIdType; + +import javax.transaction.Transactional; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Set; + +public interface IBulkDataExportSvc { + void buildExportFiles(); + + @Transactional(value = Transactional.TxType.NEVER) + void purgeExpiredFiles(); + + JobInfo submitJob(String theOutputFormat, Set theResourceTypes, Date theSince, Set theFilters); + + JobInfo getJobStatusOrThrowResourceNotFound(String theJobId); + + void cancelAndPurgeAllJobs(); + + class JobInfo { + private String myJobId; + private BulkJobStatusEnum myStatus; + private List myFiles; + 
private String myRequest; + private Date myStatusTime; + private String myStatusMessage; + + public String getRequest() { + return myRequest; + } + + public void setRequest(String theRequest) { + myRequest = theRequest; + } + + public Date getStatusTime() { + return myStatusTime; + } + + public JobInfo setStatusTime(Date theStatusTime) { + myStatusTime = theStatusTime; + return this; + } + + public String getJobId() { + return myJobId; + } + + public JobInfo setJobId(String theJobId) { + myJobId = theJobId; + return this; + } + + public List getFiles() { + if (myFiles == null) { + myFiles = new ArrayList<>(); + } + return myFiles; + } + + public BulkJobStatusEnum getStatus() { + return myStatus; + } + + public JobInfo setStatus(BulkJobStatusEnum theStatus) { + myStatus = theStatus; + return this; + } + + public String getStatusMessage() { + return myStatusMessage; + } + + public JobInfo setStatusMessage(String theStatusMessage) { + myStatusMessage = theStatusMessage; + return this; + } + + public FileEntry addFile() { + FileEntry retVal = new FileEntry(); + getFiles().add(retVal); + return retVal; + } + } + + + class FileEntry { + private String myResourceType; + private IIdType myResourceId; + + public String getResourceType() { + return myResourceType; + } + + public FileEntry setResourceType(String theResourceType) { + myResourceType = theResourceType; + return this; + } + + public IIdType getResourceId() { + return myResourceId; + } + + public FileEntry setResourceId(IIdType theResourceId) { + myResourceId = theResourceId; + return this; + } + } + + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java index b996327a717..0cc22e0ceeb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java @@ -6,11 +6,17 @@ import 
ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.interceptor.executor.InterceptorService; import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; +import ca.uhn.fhir.jpa.bulk.BulkDataExportSvcImpl; +import ca.uhn.fhir.jpa.bulk.BulkDataExportProvider; +import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc; import ca.uhn.fhir.jpa.dao.DaoRegistry; import ca.uhn.fhir.jpa.graphql.JpaStorageServices; import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider; import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; +import ca.uhn.fhir.jpa.sched.AutowiringSpringBeanJobFactory; +import ca.uhn.fhir.jpa.sched.SchedulerServiceImpl; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc; import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl; @@ -29,7 +35,6 @@ import ca.uhn.fhir.jpa.subscription.module.matcher.InMemorySubscriptionMatcher; import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices; import org.hibernate.jpa.HibernatePersistenceProvider; import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices; -import org.springframework.beans.factory.annotation.Autowire; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.*; import org.springframework.core.env.Environment; @@ -38,15 +43,10 @@ import org.springframework.dao.annotation.PersistenceExceptionTranslationPostPro import org.springframework.data.jpa.repository.config.EnableJpaRepositories; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.scheduling.TaskScheduler; -import org.springframework.scheduling.annotation.EnableScheduling; -import org.springframework.scheduling.annotation.SchedulingConfigurer; import 
org.springframework.scheduling.concurrent.ConcurrentTaskScheduler; import org.springframework.scheduling.concurrent.ScheduledExecutorFactoryBean; -import org.springframework.scheduling.config.ScheduledTaskRegistrar; import org.springframework.web.socket.config.annotation.WebSocketConfigurer; -import javax.annotation.Nonnull; - /* * #%L * HAPI FHIR JPA Server @@ -69,7 +69,6 @@ import javax.annotation.Nonnull; @Configuration -@EnableScheduling @EnableJpaRepositories(basePackages = "ca.uhn.fhir.jpa.dao.data") @ComponentScan(basePackages = "ca.uhn.fhir.jpa", excludeFilters = { @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = BaseConfig.class), @@ -77,8 +76,7 @@ import javax.annotation.Nonnull; @ComponentScan.Filter(type = FilterType.REGEX, pattern = ".*\\.test\\..*"), @ComponentScan.Filter(type = FilterType.REGEX, pattern = ".*Test.*"), @ComponentScan.Filter(type = FilterType.REGEX, pattern = "ca.uhn.fhir.jpa.subscription.module.standalone.*")}) - -public abstract class BaseConfig implements SchedulingConfigurer { +public abstract class BaseConfig { public static final String TASK_EXECUTOR_NAME = "hapiJpaTaskExecutor"; public static final String GRAPHQL_PROVIDER_NAME = "myGraphQLProvider"; @@ -86,18 +84,12 @@ public abstract class BaseConfig implements SchedulingConfigurer { @Autowired protected Environment myEnv; - - @Override - public void configureTasks(@Nonnull ScheduledTaskRegistrar theTaskRegistrar) { - theTaskRegistrar.setTaskScheduler(taskScheduler()); - } - @Bean("myDaoRegistry") public DaoRegistry daoRegistry() { return new DaoRegistry(); } - @Bean(autowire = Autowire.BY_TYPE) + @Bean public DatabaseBackedPagingProvider databaseBackedPagingProvider() { return new DatabaseBackedPagingProvider(); } @@ -226,7 +218,7 @@ public abstract class BaseConfig implements SchedulingConfigurer { * Subclasses may override */ protected boolean isSupported(String theResourceType) { - return daoRegistry().getResourceDaoIfExists(theResourceType) != null; + 
return daoRegistry().getResourceDaoOrNull(theResourceType) != null; } @Bean @@ -241,6 +233,30 @@ public abstract class BaseConfig implements SchedulingConfigurer { return retVal; } + @Bean + public ISchedulerService schedulerService() { + return new SchedulerServiceImpl(); + } + + @Bean + public AutowiringSpringBeanJobFactory schedulerJobFactory() { + return new AutowiringSpringBeanJobFactory(); + } + + @Bean + @Lazy + public IBulkDataExportSvc bulkDataExportSvc() { + return new BulkDataExportSvcImpl(); + } + + @Bean + @Lazy + public BulkDataExportProvider bulkDataExportProvider() { + return new BulkDataExportProvider(); + } + + + public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theCtx) { theFactory.setJpaDialect(hibernateJpaDialect(theCtx.getLocalizer())); theFactory.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity"); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java index 82994c5e145..0995f04063a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java @@ -80,7 +80,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; @Transactional(propagation = Propagation.REQUIRED) public abstract class BaseHapiFhirResourceDao extends BaseHapiFhirDao implements IFhirResourceDao { - private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirResourceDao.class); + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class); @Autowired protected PlatformTransactionManager myPlatformTransactionManager; @@ -551,10 +551,22 @@ public abstract class BaseHapiFhirResourceDao extends B myEntityManager.merge(entity); } + private void 
validateExpungeEnabled() { + if (!myDaoConfig.isExpungeEnabled()) { + throw new MethodNotAllowedException("$expunge is not enabled on this server"); + } + } + @Override @Transactional(propagation = Propagation.NEVER) public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) { + validateExpungeEnabled(); + return forceExpungeInExistingTransaction(theId, theExpungeOptions, theRequest); + } + @Override + @Transactional(propagation = Propagation.SUPPORTS) + public ExpungeOutcome forceExpungeInExistingTransaction(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) { TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager); BaseHasResource entity = txTemplate.execute(t -> readEntity(theId, theRequest)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoRegistry.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoRegistry.java index 220e6873ac2..4878f90e584 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoRegistry.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/DaoRegistry.java @@ -41,6 +41,9 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry { @Autowired private FhirContext myContext; + private volatile Map> myResourceNameToResourceDao; + private volatile IFhirSystemDao mySystemDao; + private Set mySupportedResourceTypes; /** * Constructor @@ -49,11 +52,6 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry { super(); } - private volatile Map> myResourceNameToResourceDao; - private volatile IFhirSystemDao mySystemDao; - - private Set mySupportedResourceTypes; - public void setSupportedResourceTypes(Collection theSupportedResourceTypes) { HashSet supportedResourceTypes = new HashSet<>(); if (theSupportedResourceTypes != null) { @@ -138,7 +136,10 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry { @Override 
public boolean isResourceTypeSupported(String theResourceType) { - return mySupportedResourceTypes == null || mySupportedResourceTypes.contains(theResourceType); + if (mySupportedResourceTypes == null) { + return getResourceDaoOrNull(theResourceType) != null; + } + return mySupportedResourceTypes.contains(theResourceType); } private void init() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDao.java index 88cbd75d8ee..843eb14b43a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFhirResourceDao.java @@ -116,6 +116,8 @@ public interface IFhirResourceDao extends IDao { ExpungeOutcome expunge(IIdType theIIdType, ExpungeOptions theExpungeOptions, RequestDetails theRequest); + ExpungeOutcome forceExpungeInExistingTransaction(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest); + Class getResourceType(); IBundleProvider history(Date theSince, Date theUntil, RequestDetails theRequestDetails); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java index f71fbcfeee9..93cafb602e6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java @@ -185,9 +185,9 @@ public class TransactionProcessor { if (theRequestDetails != null) { if (outcome.getResource() != null) { String prefer = theRequestDetails.getHeader(Constants.HEADER_PREFER); - PreferReturnEnum preferReturn = RestfulServerUtils.parsePreferHeader(null, prefer); + PreferHeader.PreferReturnEnum preferReturn = RestfulServerUtils.parsePreferHeader(null, prefer).getReturn(); if (preferReturn != null) { - if 
(preferReturn == PreferReturnEnum.REPRESENTATION) { + if (preferReturn == PreferHeader.PreferReturnEnum.REPRESENTATION) { outcome.fireResourceViewCallbacks(); myVersionAdapter.setResource(newEntry, outcome.getResource()); } @@ -211,7 +211,10 @@ public class TransactionProcessor { String nextReplacementIdPart = nextReplacementId.getValueAsString(); if (isUrn(nextTemporaryId) && nextTemporaryIdPart.length() > URN_PREFIX.length()) { matchUrl = matchUrl.replace(nextTemporaryIdPart, nextReplacementIdPart); - matchUrl = matchUrl.replace(UrlUtil.escapeUrlParam(nextTemporaryIdPart), nextReplacementIdPart); + String escapedUrlParam = UrlUtil.escapeUrlParam(nextTemporaryIdPart); + if (isNotBlank(escapedUrlParam)) { + matchUrl = matchUrl.replace(escapedUrlParam, nextReplacementIdPart); + } } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionDao.java new file mode 100644 index 00000000000..7681ea58511 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionDao.java @@ -0,0 +1,34 @@ +package ca.uhn.fhir.jpa.dao.data; + +import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity; +import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; +import org.springframework.data.domain.Slice; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import java.util.Optional; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +public interface IBulkExportCollectionDao extends JpaRepository { + // nothing currently +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionFileDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionFileDao.java new file mode 100644 index 00000000000..2efdacccfbb --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportCollectionFileDao.java @@ -0,0 +1,28 @@ +package ca.uhn.fhir.jpa.dao.data; + +import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; +import org.springframework.data.jpa.repository.JpaRepository; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public interface IBulkExportCollectionFileDao extends JpaRepository { + // nothing currently +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java new file mode 100644 index 00000000000..6fab05c1324 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkExportJobDao.java @@ -0,0 +1,47 @@ +package ca.uhn.fhir.jpa.dao.data; + +import ca.uhn.fhir.jpa.bulk.BulkJobStatusEnum; +import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import java.util.Date; +import java.util.Optional; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2019 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public interface IBulkExportJobDao extends JpaRepository { + + @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myJobId = :jobid") + Optional findByJobId(@Param("jobid") String theUuid); + + @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myStatus = :status") + Slice findByStatus(Pageable thePage, @Param("status") BulkJobStatusEnum theSubmitted); + + @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff") + Slice findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff); + + @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status") + Slice findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkJobStatusEnum theNotStatus); +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java index 3acc1a39d04..678bf85a96c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java @@ -37,11 +37,11 @@ public interface ISearchDao extends JpaRepository { @Query("SELECT s FROM Search s LEFT OUTER JOIN FETCH s.myIncludes WHERE s.myUuid = :uuid") Optional findByUuidAndFetchIncludes(@Param("uuid") String theUuid); - @Query("SELECT s.myId FROM Search s WHERE s.mySearchLastReturned < :cutoff") - Slice findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, Pageable thePage); + @Query("SELECT s.myId FROM Search s WHERE (s.mySearchLastReturned < :cutoff) AND (s.myExpiryOrNull IS NULL OR s.myExpiryOrNull < :now)") + Slice findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, @Param("now") Date theNow, Pageable thePage); - @Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND s.myCreated > :cutoff 
AND s.myDeleted = false") - Collection find(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff); + @Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND (s.myCreated > :cutoff) AND s.myDeleted = false") + Collection findWithCutoffOrExpiry(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff); @Modifying @Query("UPDATE Search s SET s.mySearchLastReturned = :last WHERE s.myId = :pid") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeService.java index 238862cbd44..83dc6c8e45f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeService.java @@ -49,10 +49,6 @@ public abstract class ExpungeService { public ExpungeOutcome expunge(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions, RequestDetails theRequest) { ourLog.info("Expunge: ResourceName[{}] Id[{}] Version[{}] Options[{}]", theResourceName, theResourceId, theVersion, theExpungeOptions); - if (!myConfig.isExpungeEnabled()) { - throw new MethodNotAllowedException("$expunge is not enabled on this server"); - } - if (theExpungeOptions.getLimit() < 1) { throw new InvalidRequestException("Expunge limit may not be less than 1. 
Received expunge limit " + theExpungeOptions.getLimit() + "."); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java index 60186d6e4ec..b36ef9876a7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java @@ -55,6 +55,16 @@ public class PartitionRunner { return; } + if (callableTasks.size() == 1) { + try { + callableTasks.get(0).call(); + return; + } catch (Exception e) { + ourLog.error("Error while expunging.", e); + throw new InternalErrorException(e); + } + } + ExecutorService executorService = buildExecutor(callableTasks.size()); try { List> futures = executorService.invokeAll(callableTasks); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionEntity.java new file mode 100644 index 00000000000..05b76de4b46 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionEntity.java @@ -0,0 +1,65 @@ +package ca.uhn.fhir.jpa.entity; + +import ca.uhn.fhir.jpa.model.entity.ResourceTable; + +import javax.persistence.*; +import java.util.ArrayList; +import java.util.Collection; + +@Entity +@Table(name = "HFJ_BLK_EXPORT_COLLECTION") +public class BulkExportCollectionEntity { + + @Id + @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXCOL_PID") + @SequenceGenerator(name = "SEQ_BLKEXCOL_PID", sequenceName = "SEQ_BLKEXCOL_PID") + @Column(name = "PID") + private Long myId; + @ManyToOne + @JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name="FK_BLKEXCOL_JOB")) + private BulkExportJobEntity myJob; + @Column(name = "RES_TYPE", length = 
ResourceTable.RESTYPE_LEN, nullable = false) + private String myResourceType; + @Column(name = "TYPE_FILTER", length = 1000, nullable = true) + private String myFilter; + @Version + @Column(name = "OPTLOCK", nullable = false) + private int myVersion; + @OneToMany(fetch = FetchType.LAZY, mappedBy = "myCollection") + private Collection myFiles; + + public void setJob(BulkExportJobEntity theJob) { + myJob = theJob; + } + + public String getResourceType() { + return myResourceType; + } + + public void setResourceType(String theResourceType) { + myResourceType = theResourceType; + } + + public String getFilter() { + return myFilter; + } + + public void setFilter(String theFilter) { + myFilter = theFilter; + } + + public int getVersion() { + return myVersion; + } + + public void setVersion(int theVersion) { + myVersion = theVersion; + } + + public Collection getFiles() { + if (myFiles == null) { + myFiles = new ArrayList<>(); + } + return myFiles; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionFileEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionFileEntity.java new file mode 100644 index 00000000000..dbeeda6e455 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionFileEntity.java @@ -0,0 +1,33 @@ +package ca.uhn.fhir.jpa.entity; + +import ca.uhn.fhir.jpa.model.entity.ForcedId; + +import javax.persistence.*; + +@Entity +@Table(name = "HFJ_BLK_EXPORT_COLFILE") +public class BulkExportCollectionFileEntity { + + @Id + @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXCOLFILE_PID") + @SequenceGenerator(name = "SEQ_BLKEXCOLFILE_PID", sequenceName = "SEQ_BLKEXCOLFILE_PID") + @Column(name = "PID") + private Long myId; + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "COLLECTION_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name="FK_BLKEXCOLFILE_COLLECT")) + private 
BulkExportCollectionEntity myCollection; + @Column(name = "RES_ID", length = ForcedId.MAX_FORCED_ID_LENGTH, nullable = false) + private String myResourceId; + + public void setCollection(BulkExportCollectionEntity theCollection) { + myCollection = theCollection; + } + + public void setResource(String theResourceId) { + myResourceId = theResourceId; + } + + public String getResourceId() { + return myResourceId; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java new file mode 100644 index 00000000000..8b09a959d2b --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java @@ -0,0 +1,157 @@ +package ca.uhn.fhir.jpa.entity; + +import ca.uhn.fhir.jpa.bulk.BulkJobStatusEnum; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.hl7.fhir.r5.model.InstantType; + +import javax.persistence.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +@Entity +@Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = { + @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID") +}, indexes = { + @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME") +}) +public class BulkExportJobEntity { + + public static final int REQUEST_LENGTH = 500; + public static final int STATUS_MESSAGE_LEN = 500; + @Id + @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXJOB_PID") + @SequenceGenerator(name = "SEQ_BLKEXJOB_PID", sequenceName = "SEQ_BLKEXJOB_PID") + @Column(name = "PID") + private Long myId; + + @Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false) + private String myJobId; + + @Enumerated(EnumType.STRING) + @Column(name = "JOB_STATUS", length = 10, nullable = false) + private BulkJobStatusEnum myStatus; + 
@Temporal(TemporalType.TIMESTAMP) + @Column(name = "CREATED_TIME", nullable = false) + private Date myCreated; + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "STATUS_TIME", nullable = false) + private Date myStatusTime; + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "EXP_TIME", nullable = false) + private Date myExpiry; + @Column(name = "REQUEST", nullable = false, length = REQUEST_LENGTH) + private String myRequest; + @OneToMany(fetch = FetchType.LAZY, mappedBy = "myJob") + private Collection myCollections; + @Version + @Column(name = "OPTLOCK", nullable = false) + private int myVersion; + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "EXP_SINCE", nullable = true) + private Date mySince; + @Column(name = "STATUS_MESSAGE", nullable = true, length = STATUS_MESSAGE_LEN) + private String myStatusMessage; + + public Date getCreated() { + return myCreated; + } + + public void setCreated(Date theCreated) { + myCreated = theCreated; + } + + public String getStatusMessage() { + return myStatusMessage; + } + + public void setStatusMessage(String theStatusMessage) { + myStatusMessage = theStatusMessage; + } + + public String getRequest() { + return myRequest; + } + + public void setRequest(String theRequest) { + myRequest = theRequest; + } + + public void setExpiry(Date theExpiry) { + myExpiry = theExpiry; + } + + public Collection getCollections() { + if (myCollections == null) { + myCollections = new ArrayList<>(); + } + return myCollections; + } + + public String getJobId() { + return myJobId; + } + + public void setJobId(String theJobId) { + myJobId = theJobId; + } + + @Override + public String toString() { + ToStringBuilder b = new ToStringBuilder(this); + if (isNotBlank(myJobId)) { + b.append("jobId", myJobId); + } + if (myStatus != null) { + b.append("status", myStatus + " " + new InstantType(myStatusTime).getValueAsString()); + } + b.append("created", new InstantType(myExpiry).getValueAsString()); + b.append("expiry", new 
InstantType(myExpiry).getValueAsString()); + b.append("request", myRequest); + b.append("since", mySince); + if (isNotBlank(myStatusMessage)) { + b.append("statusMessage", myStatusMessage); + } + return b.toString(); + } + + public BulkJobStatusEnum getStatus() { + return myStatus; + } + + public void setStatus(BulkJobStatusEnum theStatus) { + if (myStatus != theStatus) { + myStatusTime = new Date(); + myStatus = theStatus; + } + } + + public Date getStatusTime() { + return myStatusTime; + } + + public int getVersion() { + return myVersion; + } + + public void setVersion(int theVersion) { + myVersion = theVersion; + } + + public Long getId() { + return myId; + } + + public Date getSince() { + if (mySince != null) { + return new Date(mySince.getTime()); + } + return null; + } + + public void setSince(Date theSince) { + mySince = theSince; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java index 10908546890..4649d8b8a7e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java @@ -59,6 +59,9 @@ public class Search implements ICachedSearchDetails, Serializable { private Integer myFailureCode; @Column(name = "FAILURE_MESSAGE", length = FAILURE_MESSAGE_LENGTH, nullable = true) private String myFailureMessage; + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "EXPIRY_OR_NULL", nullable = true) + private Date myExpiryOrNull; @Id @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SEARCH") @SequenceGenerator(name = "SEQ_SEARCH", sequenceName = "SEQ_SEARCH") @@ -108,7 +111,6 @@ public class Search implements ICachedSearchDetails, Serializable { @Lob @Column(name = "SEARCH_PARAM_MAP", nullable = true) private byte[] mySearchParameterMap; - /** * Constructor */ @@ -116,6 +118,14 @@ public class Search implements ICachedSearchDetails, 
Serializable { super(); } + public Date getExpiryOrNull() { + return myExpiryOrNull; + } + + public void setExpiryOrNull(Date theExpiryOrNull) { + myExpiryOrNull = theExpiryOrNull; + } + public Boolean getDeleted() { return myDeleted; } @@ -230,11 +240,15 @@ public class Search implements ICachedSearchDetails, Serializable { } public void setSearchQueryString(String theSearchQueryString) { - if (theSearchQueryString != null && theSearchQueryString.length() > MAX_SEARCH_QUERY_STRING) { - mySearchQueryString = null; + if (theSearchQueryString == null || theSearchQueryString.length() > MAX_SEARCH_QUERY_STRING) { + // We want this field to always have a wide distribution of values in order + // to avoid optimizers avoiding using it if it has lots of nulls, so in the + // case of null, just put a value that will never be hit + mySearchQueryString = UUID.randomUUID().toString(); } else { mySearchQueryString = theSearchQueryString; } + mySearchQueryStringHash = mySearchQueryString.hashCode(); } public SearchTypeEnum getSearchType() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/AutowiringSpringBeanJobFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/AutowiringSpringBeanJobFactory.java new file mode 100644 index 00000000000..6e2c37c1c7a --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/AutowiringSpringBeanJobFactory.java @@ -0,0 +1,29 @@ +package ca.uhn.fhir.jpa.sched; + +import org.quartz.spi.TriggerFiredBundle; +import org.springframework.beans.factory.config.AutowireCapableBeanFactory; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.scheduling.quartz.SpringBeanJobFactory; + +public class AutowiringSpringBeanJobFactory extends SpringBeanJobFactory implements ApplicationContextAware { + + private transient AutowireCapableBeanFactory myBeanFactory; + private ApplicationContext myAppCtx; + + 
@Override + public void setApplicationContext(final ApplicationContext theApplicationContext) { + myAppCtx = theApplicationContext; + myBeanFactory = theApplicationContext.getAutowireCapableBeanFactory(); + } + + @Override + protected Object createJobInstance(final TriggerFiredBundle bundle) throws Exception { + Object job = super.createJobInstance(bundle); + myBeanFactory.autowireBean(job); + if (job instanceof ApplicationContextAware) { + ((ApplicationContextAware) job).setApplicationContext(myAppCtx); + } + return job; + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/QuartzTableSeeder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/QuartzTableSeeder.java new file mode 100644 index 00000000000..f71bf370278 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/QuartzTableSeeder.java @@ -0,0 +1,18 @@ +package ca.uhn.fhir.jpa.sched; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; + +import javax.annotation.PostConstruct; + +public class QuartzTableSeeder { + + @Autowired + private LocalContainerEntityManagerFactoryBean myEntityManagerFactory; + + @PostConstruct + public void start() { + + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/SchedulerServiceImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/SchedulerServiceImpl.java new file mode 100644 index 00000000000..7cf633b7551 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sched/SchedulerServiceImpl.java @@ -0,0 +1,559 @@ +package ca.uhn.fhir.jpa.sched; + +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import com.google.common.collect.Sets; +import org.apache.commons.lang3.Validate; +import org.quartz.Calendar; +import 
org.quartz.*; +import org.quartz.impl.JobDetailImpl; +import org.quartz.impl.StdSchedulerFactory; +import org.quartz.impl.matchers.GroupMatcher; +import org.quartz.spi.JobFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.event.ContextRefreshedEvent; +import org.springframework.context.event.EventListener; +import org.springframework.core.env.Environment; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import java.util.*; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; + +import static org.quartz.impl.StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME; + +/** + * This class provides task scheduling for the entire module using the Quartz library. + * Inside here, we have two schedulers: + *
    + *
  • + * The Local Scheduler handles tasks that need to execute locally. This + * typically means things that should happen on all nodes in a clustered + * environment. + *
  • + *
  • + * The Cluster Scheduler handles tasks that are distributed and should be + * handled by only one node in the cluster (assuming a clustered server). If the + * server is not clustered, this scheduler acts the same way as the + * local scheduler. + *
  • + *
+ */ +public class SchedulerServiceImpl implements ISchedulerService { + public static final String SCHEDULING_DISABLED = "scheduling_disabled"; + public static final String SCHEDULING_DISABLED_EQUALS_TRUE = SCHEDULING_DISABLED + "=true"; + + private static final Logger ourLog = LoggerFactory.getLogger(SchedulerServiceImpl.class); + private static int ourNextSchedulerId = 0; + private Scheduler myLocalScheduler; + private Scheduler myClusteredScheduler; + private String myThreadNamePrefix; + private boolean myLocalSchedulingEnabled; + private boolean myClusteredSchedulingEnabled; + @Autowired + private AutowiringSpringBeanJobFactory mySpringBeanJobFactory; + private AtomicBoolean myStopping = new AtomicBoolean(false); + @Autowired + private Environment myEnvironment; + + /** + * Constructor + */ + public SchedulerServiceImpl() { + setThreadNamePrefix("hapi-fhir-jpa-scheduler"); + setLocalSchedulingEnabled(true); + setClusteredSchedulingEnabled(true); + } + + public boolean isLocalSchedulingEnabled() { + return myLocalSchedulingEnabled; + } + + public void setLocalSchedulingEnabled(boolean theLocalSchedulingEnabled) { + myLocalSchedulingEnabled = theLocalSchedulingEnabled; + } + + public boolean isClusteredSchedulingEnabled() { + return myClusteredSchedulingEnabled; + } + + public void setClusteredSchedulingEnabled(boolean theClusteredSchedulingEnabled) { + myClusteredSchedulingEnabled = theClusteredSchedulingEnabled; + } + + public String getThreadNamePrefix() { + return myThreadNamePrefix; + } + + public void setThreadNamePrefix(String theThreadNamePrefix) { + myThreadNamePrefix = theThreadNamePrefix; + } + + @PostConstruct + public void start() throws SchedulerException { + myLocalScheduler = createLocalScheduler(); + myClusteredScheduler = createClusteredScheduler(); + myStopping.set(false); + } + + /** + * We defer startup of executing started tasks until we're sure we're ready for it + * and the startup is completely done + */ + @EventListener + public void 
contextStarted(ContextRefreshedEvent theEvent) throws SchedulerException { + try { + ourLog.info("Starting task schedulers for context {}", theEvent != null ? theEvent.getApplicationContext().getId() : "null"); + if (myLocalScheduler != null) { + myLocalScheduler.start(); + } + if (myClusteredScheduler != null) { + myClusteredScheduler.start(); + } + } catch (Exception e) { + ourLog.error("Failed to start context", e); + throw new SchedulerException(e); + } + } + + private Scheduler createLocalScheduler() throws SchedulerException { + if (!isLocalSchedulingEnabled() || isSchedulingDisabledForUnitTests()) { + return new NullScheduler(); + } + Properties localProperties = new Properties(); + localProperties.setProperty(PROP_SCHED_INSTANCE_NAME, "local-" + ourNextSchedulerId++); + quartzPropertiesCommon(localProperties); + quartzPropertiesLocal(localProperties); + StdSchedulerFactory factory = new StdSchedulerFactory(); + factory.initialize(localProperties); + Scheduler scheduler = factory.getScheduler(); + configureSchedulerCommon(scheduler); + scheduler.standby(); + return scheduler; + } + + private Scheduler createClusteredScheduler() throws SchedulerException { + if (!isClusteredSchedulingEnabled() || isSchedulingDisabledForUnitTests()) { + return new NullScheduler(); + } + Properties clusteredProperties = new Properties(); + clusteredProperties.setProperty(PROP_SCHED_INSTANCE_NAME, "clustered-" + ourNextSchedulerId++); + quartzPropertiesCommon(clusteredProperties); + quartzPropertiesClustered(clusteredProperties); + StdSchedulerFactory factory = new StdSchedulerFactory(); + factory.initialize(clusteredProperties); + Scheduler scheduler = factory.getScheduler(); + configureSchedulerCommon(scheduler); + scheduler.standby(); + return scheduler; + } + + private void configureSchedulerCommon(Scheduler theScheduler) throws SchedulerException { + theScheduler.setJobFactory(mySpringBeanJobFactory); + } + + @PreDestroy + public void stop() throws SchedulerException { + 
ourLog.info("Shutting down task scheduler..."); + + myStopping.set(true); + myLocalScheduler.shutdown(true); + myClusteredScheduler.shutdown(true); + } + + @Override + public void purgeAllScheduledJobsForUnitTest() throws SchedulerException { + myLocalScheduler.clear(); + myClusteredScheduler.clear(); + } + + @Override + public void logStatus() { + try { + Set keys = myLocalScheduler.getJobKeys(GroupMatcher.anyGroup()); + String keysString = keys.stream().map(t -> t.getName()).collect(Collectors.joining(", ")); + ourLog.info("Local scheduler has jobs: {}", keysString); + + keys = myClusteredScheduler.getJobKeys(GroupMatcher.anyGroup()); + keysString = keys.stream().map(t -> t.getName()).collect(Collectors.joining(", ")); + ourLog.info("Clustered scheduler has jobs: {}", keysString); + } catch (SchedulerException e) { + throw new InternalErrorException(e); + } + } + + @Override + public void scheduleFixedDelay(long theIntervalMillis, boolean theClusteredTask, ScheduledJobDefinition theJobDefinition) { + Validate.isTrue(theIntervalMillis >= 100); + + Validate.notNull(theJobDefinition); + Validate.notNull(theJobDefinition.getJobClass()); + Validate.notBlank(theJobDefinition.getId()); + + JobKey jobKey = new JobKey(theJobDefinition.getId()); + + JobDetailImpl jobDetail = new NonConcurrentJobDetailImpl(); + jobDetail.setJobClass(theJobDefinition.getJobClass()); + jobDetail.setKey(jobKey); + jobDetail.setName(theJobDefinition.getId()); + jobDetail.setJobDataMap(new JobDataMap(theJobDefinition.getJobData())); + + ScheduleBuilder schedule = SimpleScheduleBuilder + .simpleSchedule() + .withIntervalInMilliseconds(theIntervalMillis) + .repeatForever(); + + Trigger trigger = TriggerBuilder.newTrigger() + .forJob(jobDetail) + .startNow() + .withSchedule(schedule) + .build(); + + Set triggers = Sets.newHashSet(trigger); + try { + Scheduler scheduler; + if (theClusteredTask) { + scheduler = myClusteredScheduler; + } else { + scheduler = myLocalScheduler; + } + 
scheduler.scheduleJob(jobDetail, triggers, true); + } catch (SchedulerException e) { + ourLog.error("Failed to schedule job", e); + throw new InternalErrorException(e); + } + + } + + @Override + public boolean isStopping() { + return myStopping.get(); + } + + /** + * Properties for the local scheduler (see the class docs to learn what this means) + */ + protected void quartzPropertiesLocal(Properties theProperties) { + // nothing + } + + /** + * Properties for the cluster scheduler (see the class docs to learn what this means) + */ + protected void quartzPropertiesClustered(Properties theProperties) { +// theProperties.put("org.quartz.jobStore.tablePrefix", "QRTZHFJC_"); + } + + protected void quartzPropertiesCommon(Properties theProperties) { + theProperties.put("org.quartz.threadPool.threadCount", "4"); + theProperties.put("org.quartz.threadPool.threadNamePrefix", getThreadNamePrefix() + "-" + theProperties.get(PROP_SCHED_INSTANCE_NAME)); + } + + private boolean isSchedulingDisabledForUnitTests() { + String schedulingDisabled = myEnvironment.getProperty(SCHEDULING_DISABLED); + return "true".equals(schedulingDisabled); + } + + private static class NonConcurrentJobDetailImpl extends JobDetailImpl { + private static final long serialVersionUID = 5716197221121989740L; + + // All HAPI FHIR jobs shouldn't allow concurrent execution + @Override + public boolean isConcurrentExectionDisallowed() { + return true; + } + } + + private static class NullScheduler implements Scheduler { + @Override + public String getSchedulerName() { + return null; + } + + @Override + public String getSchedulerInstanceId() { + return null; + } + + @Override + public SchedulerContext getContext() { + return null; + } + + @Override + public void start() { + + } + + @Override + public void startDelayed(int seconds) { + + } + + @Override + public boolean isStarted() { + return false; + } + + @Override + public void standby() { + + } + + @Override + public boolean isInStandbyMode() { + return 
false; + } + + @Override + public void shutdown() { + + } + + @Override + public void shutdown(boolean waitForJobsToComplete) { + + } + + @Override + public boolean isShutdown() { + return false; + } + + @Override + public SchedulerMetaData getMetaData() { + return null; + } + + @Override + public List getCurrentlyExecutingJobs() { + return null; + } + + @Override + public void setJobFactory(JobFactory factory) { + + } + + @Override + public ListenerManager getListenerManager() { + return null; + } + + @Override + public Date scheduleJob(JobDetail jobDetail, Trigger trigger) { + return null; + } + + @Override + public Date scheduleJob(Trigger trigger) { + return null; + } + + @Override + public void scheduleJobs(Map> triggersAndJobs, boolean replace) { + + } + + @Override + public void scheduleJob(JobDetail jobDetail, Set triggersForJob, boolean replace) { + + } + + @Override + public boolean unscheduleJob(TriggerKey triggerKey) { + return false; + } + + @Override + public boolean unscheduleJobs(List triggerKeys) { + return false; + } + + @Override + public Date rescheduleJob(TriggerKey triggerKey, Trigger newTrigger) { + return null; + } + + @Override + public void addJob(JobDetail jobDetail, boolean replace) { + + } + + @Override + public void addJob(JobDetail jobDetail, boolean replace, boolean storeNonDurableWhileAwaitingScheduling) { + + } + + @Override + public boolean deleteJob(JobKey jobKey) { + return false; + } + + @Override + public boolean deleteJobs(List jobKeys) { + return false; + } + + @Override + public void triggerJob(JobKey jobKey) { + + } + + @Override + public void triggerJob(JobKey jobKey, JobDataMap data) { + + } + + @Override + public void pauseJob(JobKey jobKey) { + + } + + @Override + public void pauseJobs(GroupMatcher matcher) { + + } + + @Override + public void pauseTrigger(TriggerKey triggerKey) { + + } + + @Override + public void pauseTriggers(GroupMatcher matcher) { + + } + + @Override + public void resumeJob(JobKey jobKey) { + + } + 
+ @Override + public void resumeJobs(GroupMatcher matcher) { + + } + + @Override + public void resumeTrigger(TriggerKey triggerKey) { + + } + + @Override + public void resumeTriggers(GroupMatcher matcher) { + + } + + @Override + public void pauseAll() { + + } + + @Override + public void resumeAll() { + + } + + @Override + public List getJobGroupNames() { + return null; + } + + @Override + public Set getJobKeys(GroupMatcher matcher) { + return null; + } + + @Override + public List getTriggersOfJob(JobKey jobKey) { + return null; + } + + @Override + public List getTriggerGroupNames() { + return null; + } + + @Override + public Set getTriggerKeys(GroupMatcher matcher) { + return null; + } + + @Override + public Set getPausedTriggerGroups() { + return null; + } + + @Override + public JobDetail getJobDetail(JobKey jobKey) { + return null; + } + + @Override + public Trigger getTrigger(TriggerKey triggerKey) { + return null; + } + + @Override + public Trigger.TriggerState getTriggerState(TriggerKey triggerKey) { + return null; + } + + @Override + public void resetTriggerFromErrorState(TriggerKey triggerKey) { + + } + + @Override + public void addCalendar(String calName, Calendar calendar, boolean replace, boolean updateTriggers) { + + } + + @Override + public boolean deleteCalendar(String calName) { + return false; + } + + @Override + public Calendar getCalendar(String calName) { + return null; + } + + @Override + public List getCalendarNames() { + return null; + } + + @Override + public boolean interrupt(JobKey jobKey) throws UnableToInterruptJobException { + return false; + } + + @Override + public boolean interrupt(String fireInstanceId) throws UnableToInterruptJobException { + return false; + } + + @Override + public boolean checkExists(JobKey jobKey) { + return false; + } + + @Override + public boolean checkExists(TriggerKey triggerKey) { + return false; + } + + @Override + public void clear() { + + } + } + + +} diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/StaleSearchDeletingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/StaleSearchDeletingSvcImpl.java index 5f41ca45230..af25e9a0eea 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/StaleSearchDeletingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/StaleSearchDeletingSvcImpl.java @@ -21,12 +21,17 @@ package ca.uhn.fhir.jpa.search; */ import ca.uhn.fhir.jpa.dao.DaoConfig; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc; +import org.quartz.Job; +import org.quartz.JobExecutionContext; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.scheduling.annotation.Scheduled; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; +import javax.annotation.PostConstruct; + import static ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl.DEFAULT_CUTOFF_SLACK; /** @@ -43,6 +48,8 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc { private DaoConfig myDaoConfig; @Autowired private ISearchCacheSvc mySearchCacheSvc; + @Autowired + private ISchedulerService mySchedulerService; @Override @Transactional(propagation = Propagation.NEVER) @@ -50,7 +57,14 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc { mySearchCacheSvc.pollForStaleSearchesAndDeleteThem(); } - @Scheduled(fixedDelay = DEFAULT_CUTOFF_SLACK) + @PostConstruct + public void registerScheduledJob() { + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(StaleSearchDeletingSvcImpl.class.getName()); + jobDetail.setJobClass(StaleSearchDeletingSvcImpl.SubmitJob.class); + mySchedulerService.scheduleFixedDelay(DEFAULT_CUTOFF_SLACK, true, jobDetail); + } + 
package ca.uhn.fhir.jpa.search;

/**
 * Model describing a search URL that should be kept "warm" in the query cache,
 * refreshed on the given period.
 * NOTE(review): both fields are currently unused scaffolding (no accessors and
 * no visible readers) — confirm intended use before extending.
 */
public class WarmSearchDefinition {

	private String mySearchUrl;
	private long myRefreshPeriodMillis;

}
org.springframework.transaction.support.TransactionTemplate; +import javax.annotation.PostConstruct; import java.util.Date; import java.util.Iterator; import java.util.Map; @@ -36,6 +40,8 @@ public abstract class BaseSearchCacheSvcImpl implements ISearchCacheSvc { @Autowired private PlatformTransactionManager myTxManager; + @Autowired + private ISchedulerService mySchedulerService; private ConcurrentHashMap myUnsyncedLastUpdated = new ConcurrentHashMap<>(); @@ -44,11 +50,18 @@ public abstract class BaseSearchCacheSvcImpl implements ISearchCacheSvc { myUnsyncedLastUpdated.put(theSearch.getId(), theDate); } + @PostConstruct + public void registerScheduledJob() { + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(BaseSearchCacheSvcImpl.class.getName()); + jobDetail.setJobClass(BaseSearchCacheSvcImpl.SubmitJob.class); + mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_SECOND, false, jobDetail); + } + @Override - @Scheduled(fixedDelay = 10 * DateUtils.MILLIS_PER_SECOND) public void flushLastUpdated() { TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); - txTemplate.execute(t->{ + txTemplate.execute(t -> { for (Iterator> iter = myUnsyncedLastUpdated.entrySet().iterator(); iter.hasNext(); ) { Map.Entry next = iter.next(); flushLastUpdated(next.getKey(), next.getValue()); @@ -60,5 +73,15 @@ public abstract class BaseSearchCacheSvcImpl implements ISearchCacheSvc { protected abstract void flushLastUpdated(Long theSearchId, Date theLastUpdated); + public static class SubmitJob implements Job { + @Autowired + private ISearchCacheSvc myTarget; + + @Override + public void execute(JobExecutionContext theContext) { + myTarget.flushLastUpdated(); + } + } + } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java index 541c53b3477..9365e77a65c 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java @@ -136,7 +136,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl { @Override public Collection findCandidatesForReuse(String theResourceType, String theQueryString, int theQueryStringHash, Date theCreatedAfter) { int hashCode = theQueryString.hashCode(); - return mySearchDao.find(theResourceType, hashCode, theCreatedAfter); + return mySearchDao.findWithCutoffOrExpiry(theResourceType, hashCode, theCreatedAfter); } @@ -166,7 +166,7 @@ public class DatabaseSearchCacheSvcImpl extends BaseSearchCacheSvcImpl { TransactionTemplate tt = new TransactionTemplate(myTxManager); final Slice toDelete = tt.execute(theStatus -> - mySearchDao.findWhereLastReturnedBefore(cutoff, PageRequest.of(0, 2000)) + mySearchDao.findWhereLastReturnedBefore(cutoff, new Date(), PageRequest.of(0, 2000)) ); for (final Long nextSearchToDelete : toDelete) { ourLog.debug("Deleting search with PID {}", nextSearchToDelete); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IResourceReindexingSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IResourceReindexingSvc.java index dbf7fe5ae64..cbf0f640fde 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IResourceReindexingSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IResourceReindexingSvc.java @@ -36,11 +36,6 @@ public interface IResourceReindexingSvc { */ Long markAllResourcesForReindexing(String theType); - /** - * Called automatically by the job scheduler - */ - void scheduleReindexingPass(); - /** * @return Returns null if the system did not attempt to perform a pass because one was * already proceeding. Otherwise, returns the number of resources affected. 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java index 52c9066d234..8ec617e6084 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java @@ -33,6 +33,8 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity; import ca.uhn.fhir.jpa.model.entity.ForcedId; import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; @@ -44,12 +46,13 @@ import org.apache.commons.lang3.time.DateUtils; import org.hibernate.search.util.impl.Executors; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.InstantType; +import org.quartz.Job; +import org.quartz.JobExecutionContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Slice; -import org.springframework.scheduling.annotation.Scheduled; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.support.TransactionCallback; @@ -101,6 +104,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { private EntityManager myEntityManager; @Autowired private ISearchParamRegistry mySearchParamRegistry; + @Autowired + private 
ISchedulerService mySchedulerService; @VisibleForTesting void setReindexJobDaoForUnitTest(IResourceReindexJobDao theReindexJobDao) { @@ -182,11 +187,12 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { return job.getId(); } - @Override - @Transactional(Transactional.TxType.NEVER) - @Scheduled(fixedDelay = 10 * DateUtils.MILLIS_PER_SECOND) - public void scheduleReindexingPass() { - runReindexingPass(); + @PostConstruct + public void registerScheduledJob() { + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(ResourceReindexingSvcImpl.class.getName()); + jobDetail.setJobClass(ResourceReindexingSvcImpl.SubmitJob.class); + mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_SECOND, true, jobDetail); } @Override @@ -223,6 +229,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { @Override public void cancelAndPurgeAllJobs() { ourLog.info("Cancelling and purging all resource reindexing jobs"); + myIndexingLock.lock(); + try { myTxTemplate.execute(t -> { myReindexJobDao.markAllOfTypeAsDeleted(); return null; @@ -232,6 +240,9 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { initExecutor(); expungeJobsMarkedAsDeleted(); + } finally { + myIndexingLock.unlock(); + } } private int runReindexJobs() { @@ -277,7 +288,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { } @VisibleForTesting - public void setSearchParamRegistryForUnitTest(ISearchParamRegistry theSearchParamRegistry) { + void setSearchParamRegistryForUnitTest(ISearchParamRegistry theSearchParamRegistry) { mySearchParamRegistry = theSearchParamRegistry; } @@ -306,7 +317,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { Date low = theJob.getThresholdLow() != null ? 
theJob.getThresholdLow() : BEGINNING_OF_TIME; Date high = theJob.getThresholdHigh(); - // SqlQuery for resources within threshold + // Query for resources within threshold StopWatch pageSw = new StopWatch(); Slice range = myTxTemplate.execute(t -> { PageRequest page = PageRequest.of(0, PASS_SIZE); @@ -529,4 +540,14 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { return myUpdated; } } + + public static class SubmitJob implements Job { + @Autowired + private IResourceReindexingSvc myTarget; + + @Override + public void execute(JobExecutionContext theContext) { + myTarget.runReindexingPass(); + } + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/CacheWarmingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/CacheWarmingSvcImpl.java index 624e13c84b5..9cbf950be53 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/CacheWarmingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/CacheWarmingSvcImpl.java @@ -26,22 +26,29 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.dao.DaoRegistry; import ca.uhn.fhir.jpa.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.util.UrlUtil; +import org.apache.commons.lang3.time.DateUtils; +import org.quartz.DisallowConcurrentExecution; +import org.quartz.JobExecutionContext; +import org.quartz.PersistJobDataAfterExecution; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; 
import javax.annotation.PostConstruct; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; +import java.util.*; @Component public class CacheWarmingSvcImpl implements ICacheWarmingSvc { + public static final long SCHEDULED_JOB_INTERVAL = 10 * DateUtils.MILLIS_PER_SECOND; + private static final Logger ourLog = LoggerFactory.getLogger(CacheWarmingSvcImpl.class); @Autowired private DaoConfig myDaoConfig; private Map myCacheEntryToNextRefresh = new LinkedHashMap<>(); @@ -51,10 +58,12 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc { private DaoRegistry myDaoRegistry; @Autowired private MatchUrlService myMatchUrlService; + @Autowired + private ISchedulerService mySchedulerService; @Override - @Scheduled(fixedDelay = 1000) public synchronized void performWarmingPass() { + ourLog.trace("Starting cache warming pass for {} tasks", myCacheEntryToNextRefresh.size()); for (WarmCacheEntry nextCacheEntry : new ArrayList<>(myCacheEntryToNextRefresh.keySet())) { @@ -74,6 +83,14 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc { } + @PostConstruct + public void registerScheduledJob() { + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(CacheWarmingSvcImpl.class.getName()); + jobDetail.setJobClass(CacheWarmingSvcImpl.SubmitJob.class); + mySchedulerService.scheduleFixedDelay(SCHEDULED_JOB_INTERVAL, true, jobDetail); + } + private void refreshNow(WarmCacheEntry theCacheEntry) { String nextUrl = theCacheEntry.getUrl(); @@ -98,7 +115,7 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc { initCacheMap(); } - public synchronized void initCacheMap() { + public synchronized Set initCacheMap() { myCacheEntryToNextRefresh.clear(); List warmCacheEntries = myDaoConfig.getWarmCacheEntries(); @@ -110,6 +127,24 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc { myCacheEntryToNextRefresh.put(next, 0L); } - + + return 
Collections.unmodifiableSet(myCacheEntryToNextRefresh.keySet()); + + } + + @DisallowConcurrentExecution + @PersistJobDataAfterExecution + public static class SubmitJob extends FireAtIntervalJob { + @Autowired + private ICacheWarmingSvc myTarget; + + public SubmitJob() { + super(SCHEDULED_JOB_INTERVAL); + } + + @Override + protected void doExecute(JobExecutionContext theContext) { + myTarget.performWarmingPass(); + } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/ICacheWarmingSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/ICacheWarmingSvc.java index 0dc6a9c84bb..ab1dd8fe1bb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/ICacheWarmingSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/ICacheWarmingSvc.java @@ -20,9 +20,6 @@ package ca.uhn.fhir.jpa.search.warm; * #L% */ -import org.springframework.scheduling.annotation.Scheduled; - public interface ICacheWarmingSvc { - @Scheduled(fixedDelay = 1000) void performWarmingPass(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/ISubscriptionTriggeringSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/ISubscriptionTriggeringSvc.java index ed3ca0d62ae..56082a894bb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/ISubscriptionTriggeringSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/ISubscriptionTriggeringSvc.java @@ -30,4 +30,6 @@ import java.util.List; public interface ISubscriptionTriggeringSvc { IBaseParameters triggerSubscription(List theResourceIds, List theSearchUrls, @IdParam IIdType theSubscriptionId); + + void runDeliveryPass(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/SubscriptionTriggeringSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/SubscriptionTriggeringSvcImpl.java index 
cd75cf61a86..ea2a83daa98 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/SubscriptionTriggeringSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/subscription/SubscriptionTriggeringSvcImpl.java @@ -25,6 +25,9 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.dao.DaoRegistry; import ca.uhn.fhir.jpa.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider; import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; @@ -53,10 +56,12 @@ import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.quartz.DisallowConcurrentExecution; +import org.quartz.JobExecutionContext; +import org.quartz.PersistJobDataAfterExecution; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Service; import javax.annotation.PostConstruct; @@ -73,10 +78,10 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; @Service public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc { + public static final long SCHEDULE_DELAY = DateUtils.MILLIS_PER_SECOND; private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTriggeringProvider.class); - private static final int DEFAULT_MAX_SUBMIT = 10000; - + private final List myActiveJobs = new ArrayList<>(); @Autowired private FhirContext myFhirContext; @Autowired @@ -89,10 +94,10 @@ public class 
SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc private MatchUrlService myMatchUrlService; @Autowired private IResourceModifiedConsumer myResourceModifiedConsumer; - - private final List myActiveJobs = new ArrayList<>(); private int myMaxSubmitPerPass = DEFAULT_MAX_SUBMIT; private ExecutorService myExecutorService; + @Autowired + private ISchedulerService mySchedulerService; @Override public IBaseParameters triggerSubscription(List theResourceIds, List theSearchUrls, @IdParam IIdType theSubscriptionId) { @@ -143,8 +148,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc // Submit job for processing synchronized (myActiveJobs) { myActiveJobs.add(jobDetails); + ourLog.info("Subscription triggering requested for {} resource and {} search - Gave job ID: {} and have {} jobs", resourceIds.size(), searchUrls.size(), jobDetails.getJobId(), myActiveJobs.size()); } - ourLog.info("Subscription triggering requested for {} resource and {} search - Gave job ID: {}", resourceIds.size(), searchUrls.size(), jobDetails.getJobId()); // Create a parameters response IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext); @@ -154,10 +159,19 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc return retVal; } - @Scheduled(fixedDelay = DateUtils.MILLIS_PER_SECOND) + @PostConstruct + public void registerScheduledJob() { + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(SubscriptionTriggeringSvcImpl.class.getName()); + jobDetail.setJobClass(SubscriptionTriggeringSvcImpl.SubmitJob.class); + mySchedulerService.scheduleFixedDelay(SCHEDULE_DELAY, false, jobDetail); + } + + @Override public void runDeliveryPass() { synchronized (myActiveJobs) { + if (myActiveJobs.isEmpty()) { return; } @@ -305,7 +319,7 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc return myExecutorService.submit(() -> { for (int i = 0; ; i++) { try { - 
myResourceModifiedConsumer.submitResourceModified(msg); + myResourceModifiedConsumer.submitResourceModified(msg); break; } catch (Exception e) { if (i >= 3) { @@ -375,6 +389,22 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc } + @DisallowConcurrentExecution + @PersistJobDataAfterExecution + public static class SubmitJob extends FireAtIntervalJob { + @Autowired + private ISubscriptionTriggeringSvc myTarget; + + public SubmitJob() { + super(SCHEDULE_DELAY); + } + + @Override + protected void doExecute(JobExecutionContext theContext) { + myTarget.runDeliveryPass(); + } + } + private static class SubscriptionTriggeringJobDetails { private String myJobId; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java index c0f1568c954..604acccee93 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseHapiTerminologySvcImpl.java @@ -28,6 +28,9 @@ import ca.uhn.fhir.jpa.dao.data.*; import ca.uhn.fhir.jpa.entity.*; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum; import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; +import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl; import ca.uhn.fhir.jpa.util.ScrollableResultsIterator; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -61,6 +64,8 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.*; +import org.quartz.Job; +import org.quartz.JobExecutionContext; import 
org.hl7.fhir.r4.model.codesystems.ConceptSubsumptionOutcome; import org.springframework.beans.BeansException; import org.springframework.beans.factory.annotation.Autowired; @@ -162,6 +167,8 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, private PlatformTransactionManager myTxManager; @Autowired private ITermValueSetConceptViewDao myTermValueSetConceptViewDao; + @Autowired + private ISchedulerService mySchedulerService; private void addCodeIfNotAlreadyAdded(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, TermConcept theConcept, boolean theAdd, AtomicInteger theCodeCounter) { String codeSystem = theConcept.getCodeSystemVersion().getCodeSystem().getCodeSystemUri(); @@ -1529,7 +1536,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, } } - @Scheduled(fixedRate = 5000) @Transactional(propagation = Propagation.NEVER) @Override public synchronized void saveDeferred() { @@ -1616,6 +1622,24 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, myTxTemplate = new TransactionTemplate(myTransactionManager); } + @PostConstruct + public void registerScheduledJob() { + // Register scheduled job to save deferred concepts + // In the future it would be great to make this a cluster-aware task somehow + ScheduledJobDefinition jobDefinition = new ScheduledJobDefinition(); + jobDefinition.setId(BaseHapiTerminologySvcImpl.class.getName() + "_saveDeferred"); + jobDefinition.setJobClass(SaveDeferredJob.class); + mySchedulerService.scheduleFixedDelay(5000, false, jobDefinition); + + // Register scheduled job to save deferred concepts + // In the future it would be great to make this a cluster-aware task somehow + ScheduledJobDefinition vsJobDefinition = new ScheduledJobDefinition(); + vsJobDefinition.setId(BaseHapiTerminologySvcImpl.class.getName() + "_preExpandValueSets"); + vsJobDefinition.setJobClass(PreExpandValueSetsJob.class); + 
mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_MINUTE, true, vsJobDefinition); + + } + @Override @Transactional(propagation = Propagation.REQUIRED) public void storeNewCodeSystemVersion(Long theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion) { @@ -1696,7 +1720,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, ourLog.info("Saving {} concepts...", totalCodeCount); - IdentityHashMap conceptsStack2 = new IdentityHashMap(); + IdentityHashMap conceptsStack2 = new IdentityHashMap<>(); for (TermConcept next : theCodeSystemVersion.getConcepts()) { persistChildren(next, codeSystemVersion, conceptsStack2, totalCodeCount); } @@ -1939,7 +1963,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, ourLog.info("Done storing TermConceptMap[{}]", termConceptMap.getId()); } - @Scheduled(fixedDelay = 600000) // 10 minutes. @Override public synchronized void preExpandDeferredValueSetsToTerminologyTables() { if (isNotSafeToPreExpandValueSets()) { @@ -2497,6 +2520,28 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc, return new VersionIndependentConcept(system, code); } + public static class SaveDeferredJob implements Job { + + @Autowired + private IHapiTerminologySvc myTerminologySvc; + + @Override + public void execute(JobExecutionContext theContext) { + myTerminologySvc.saveDeferred(); + } + } + + public static class PreExpandValueSetsJob implements Job { + + @Autowired + private IHapiTerminologySvc myTerminologySvc; + + @Override + public void execute(JobExecutionContext theContext) { + myTerminologySvc.preExpandDeferredValueSetsToTerminologyTables(); + } + } + /** * This method is present only for unit tests, do not call from client code */ diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JsonUtil.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JsonUtil.java new file mode 100644 index 00000000000..0084ab20228 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JsonUtil.java @@ -0,0 +1,75 @@ +package ca.uhn.fhir.jpa.util; + +/*- + * #%L + * Smile CDR - CDR + * %% + * Copyright (C) 2016 - 2018 Simpatico Intelligent Systems Inc + * %% + * All rights reserved. + * #L% + */ + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; + +import javax.annotation.Nonnull; +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; + +public class JsonUtil { + + private static final ObjectMapper ourMapperPrettyPrint; + private static final ObjectMapper ourMapperNonPrettyPrint; + + static { + ourMapperPrettyPrint = new ObjectMapper(); + ourMapperPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL); + ourMapperPrettyPrint.enable(SerializationFeature.INDENT_OUTPUT); + + ourMapperNonPrettyPrint = new ObjectMapper(); + ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL); + ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT); + } + + /** + * Parse JSON + */ + public static T deserialize(@Nonnull String theInput, @Nonnull Class theType) throws IOException { + return ourMapperPrettyPrint.readerFor(theType).readValue(theInput); + } + + /** + * Encode JSON + */ + public static String serialize(@Nonnull Object theInput) throws IOException { + return serialize(theInput, true); + } + + /** + * Encode JSON + */ + public static String serialize(@Nonnull Object theInput, boolean thePrettyPrint) throws IOException { + StringWriter sw = new StringWriter(); + if (thePrettyPrint) { + ourMapperPrettyPrint.writeValue(sw, theInput); + } else { + ourMapperNonPrettyPrint.writeValue(sw, theInput); + } + return sw.toString(); + } + + /** + * Encode JSON + */ + 
public static void serialize(@Nonnull Object theInput, @Nonnull Writer theWriter) throws IOException { + // Note: We append a string here rather than just having ourMapper write directly + // to the Writer because ourMapper seems to close the writer for some stupid + // reason.. There's probably a way of preventing that bit I'm not sure what that + // is and it's not a big deal here. + theWriter.append(serialize(theInput)); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ResourceCountCache.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ResourceCountCache.java index 0762073f5eb..843736034fd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ResourceCountCache.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ResourceCountCache.java @@ -20,14 +20,106 @@ package ca.uhn.fhir.jpa.util; * #L% */ +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import com.google.common.annotations.VisibleForTesting; +import org.apache.commons.lang3.time.DateUtils; +import org.quartz.Job; +import org.quartz.JobExecutionContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.annotation.PostConstruct; import java.util.Map; import java.util.concurrent.Callable; +import java.util.concurrent.atomic.AtomicReference; + +public class ResourceCountCache { + + private static final Logger ourLog = LoggerFactory.getLogger(ResourceCountCache.class); + private static Long ourNowForUnitTest; + private final Callable> myFetcher; + private volatile long myCacheMillis; + private AtomicReference> myCapabilityStatement = new AtomicReference<>(); + private long myLastFetched; + @Autowired + private ISchedulerService mySchedulerService; -public class ResourceCountCache extends 
SingleItemLoadingCache> { /** * Constructor */ public ResourceCountCache(Callable> theFetcher) { - super(theFetcher); + myFetcher = theFetcher; } + + public synchronized void clear() { + ourLog.info("Clearing cache"); + myCapabilityStatement.set(null); + myLastFetched = 0; + } + + public synchronized Map get() { + return myCapabilityStatement.get(); + } + + private Map refresh() { + Map retVal; + try { + retVal = myFetcher.call(); + } catch (Exception e) { + throw new InternalErrorException(e); + } + + myCapabilityStatement.set(retVal); + myLastFetched = now(); + return retVal; + } + + public void setCacheMillis(long theCacheMillis) { + myCacheMillis = theCacheMillis; + } + + public void update() { + if (myCacheMillis > 0) { + long now = now(); + long expiry = now - myCacheMillis; + if (myLastFetched < expiry) { + refresh(); + } + } + } + + @PostConstruct + public void registerScheduledJob() { + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(ResourceCountCache.class.getName()); + jobDetail.setJobClass(ResourceCountCache.SubmitJob.class); + mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_MINUTE, false, jobDetail); + } + + public static class SubmitJob implements Job { + @Autowired + private ResourceCountCache myTarget; + + @Override + public void execute(JobExecutionContext theContext) { + myTarget.update(); + } + } + + private static long now() { + if (ourNowForUnitTest != null) { + return ourNowForUnitTest; + } + return System.currentTimeMillis(); + } + + @VisibleForTesting + static void setNowForUnitTest(Long theNowForUnitTest) { + ourNowForUnitTest = theNowForUnitTest; + } + + } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SingleItemLoadingCache.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SingleItemLoadingCache.java deleted file mode 100644 index 4358fe4cdb5..00000000000 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SingleItemLoadingCache.java +++ /dev/null @@ -1,101 +0,0 @@ -package ca.uhn.fhir.jpa.util; - -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2019 University Health Network - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import com.google.common.annotations.VisibleForTesting; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.scheduling.annotation.Scheduled; - -import java.util.concurrent.Callable; -import java.util.concurrent.atomic.AtomicReference; - -/** - * This is a simple cache for CapabilityStatement resources to - * be returned as server metadata. 
- */ -public class SingleItemLoadingCache { - private static final Logger ourLog = LoggerFactory.getLogger(SingleItemLoadingCache.class); - private static Long ourNowForUnitTest; - private final Callable myFetcher; - private volatile long myCacheMillis; - private AtomicReference myCapabilityStatement = new AtomicReference<>(); - private long myLastFetched; - - /** - * Constructor - */ - public SingleItemLoadingCache(Callable theFetcher) { - myFetcher = theFetcher; - } - - public synchronized void clear() { - ourLog.info("Clearing cache"); - myCapabilityStatement.set(null); - myLastFetched = 0; - } - - public synchronized T get() { - return myCapabilityStatement.get(); - } - - private T refresh() { - T retVal; - try { - retVal = myFetcher.call(); - } catch (Exception e) { - throw new InternalErrorException(e); - } - - myCapabilityStatement.set(retVal); - myLastFetched = now(); - return retVal; - } - - public void setCacheMillis(long theCacheMillis) { - myCacheMillis = theCacheMillis; - } - - @Scheduled(fixedDelay = 60000) - public void update() { - if (myCacheMillis > 0) { - long now = now(); - long expiry = now - myCacheMillis; - if (myLastFetched < expiry) { - refresh(); - } - } - } - - private static long now() { - if (ourNowForUnitTest != null) { - return ourNowForUnitTest; - } - return System.currentTimeMillis(); - } - - @VisibleForTesting - static void setNowForUnitTest(Long theNowForUnitTest) { - ourNowForUnitTest = theNowForUnitTest; - } - -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java index 7d4b72f40d2..95930a119ad 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java @@ -259,7 +259,7 @@ public class TestUtil { } } - public static void sleepAtLeast(int theMillis) { + public static void sleepAtLeast(long theMillis) { long start = 
System.currentTimeMillis(); while (System.currentTimeMillis() <= start + theMillis) { try { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java new file mode 100644 index 00000000000..74355e728fb --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java @@ -0,0 +1,242 @@ +package ca.uhn.fhir.jpa.bulk; + +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.jpa.util.JsonUtil; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.client.apache.ResourceEntity; +import ca.uhn.fhir.rest.server.RestfulServer; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.test.utilities.JettyUtil; +import com.google.common.base.Charsets; +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.servlet.ServletHandler; +import org.eclipse.jetty.servlet.ServletHolder; +import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.InstantType; +import org.hl7.fhir.r4.model.Parameters; +import org.hl7.fhir.r4.model.StringType; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Date; +import 
java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +@RunWith(MockitoJUnitRunner.class) +public class BulkDataExportProviderTest { + + private static final String A_JOB_ID = "0000000-AAAAAA"; + private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportProviderTest.class); + private Server myServer; + private FhirContext myCtx = FhirContext.forR4(); + private int myPort; + @Mock + private IBulkDataExportSvc myBulkDataExportSvc; + private CloseableHttpClient myClient; + @Captor + private ArgumentCaptor myOutputFormatCaptor; + @Captor + private ArgumentCaptor> myResourceTypesCaptor; + @Captor + private ArgumentCaptor mySinceCaptor; + @Captor + private ArgumentCaptor> myFiltersCaptor; + + @After + public void after() throws Exception { + JettyUtil.closeServer(myServer); + myClient.close(); + } + + @Before + public void start() throws Exception { + myServer = new Server(0); + + BulkDataExportProvider provider = new BulkDataExportProvider(); + provider.setBulkDataExportSvcForUnitTests(myBulkDataExportSvc); + provider.setFhirContextForUnitTest(myCtx); + + ServletHandler proxyHandler = new ServletHandler(); + RestfulServer servlet = new RestfulServer(myCtx); + servlet.registerProvider(provider); + ServletHolder servletHolder = new ServletHolder(servlet); + proxyHandler.addServletWithMapping(servletHolder, "/*"); + myServer.setHandler(proxyHandler); + JettyUtil.startServer(myServer); + myPort = JettyUtil.getPortForStartedServer(myServer); + + PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); + HttpClientBuilder builder = HttpClientBuilder.create(); + builder.setConnectionManager(connectionManager); + myClient = builder.build(); + + } + + @Test + public void 
testSuccessfulInitiateBulkRequest() throws IOException { + + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() + .setJobId(A_JOB_ID); + when(myBulkDataExportSvc.submitJob(any(), any(), any(), any())).thenReturn(jobInfo); + + InstantType now = InstantType.now(); + + Parameters input = new Parameters(); + input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner")); + input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); + input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Patient?identifier=foo")); + + HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); + post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + post.setEntity(new ResourceEntity(myCtx, input)); + try (CloseableHttpResponse response = myClient.execute(post)) { + ourLog.info("Response: {}", response.toString()); + + assertEquals(202, response.getStatusLine().getStatusCode()); + assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); + assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); + } + + verify(myBulkDataExportSvc, times(1)).submitJob(myOutputFormatCaptor.capture(), myResourceTypesCaptor.capture(), mySinceCaptor.capture(), myFiltersCaptor.capture()); + assertEquals(Constants.CT_FHIR_NDJSON, myOutputFormatCaptor.getValue()); + assertThat(myResourceTypesCaptor.getValue(), containsInAnyOrder("Patient", "Practitioner")); + assertThat(mySinceCaptor.getValue(), notNullValue()); + assertThat(myFiltersCaptor.getValue(), containsInAnyOrder("Patient?identifier=foo")); + + } + + @Test + public void testPollForStatus_BUILDING() throws IOException { + + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() + 
.setJobId(A_JOB_ID) + .setStatus(BulkJobStatusEnum.BUILDING) + .setStatusTime(InstantType.now().getValue()); + when(myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo); + + String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; + HttpGet get = new HttpGet(url); + get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + try (CloseableHttpResponse response = myClient.execute(get)) { + ourLog.info("Response: {}", response.toString()); + + assertEquals(202, response.getStatusLine().getStatusCode()); + assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); + assertEquals("120", response.getFirstHeader(Constants.HEADER_RETRY_AFTER).getValue()); + assertThat(response.getFirstHeader(Constants.HEADER_X_PROGRESS).getValue(), containsString("Build in progress - Status set to BUILDING at 20")); + } + + } + + @Test + public void testPollForStatus_ERROR() throws IOException { + + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() + .setJobId(A_JOB_ID) + .setStatus(BulkJobStatusEnum.ERROR) + .setStatusTime(InstantType.now().getValue()) + .setStatusMessage("Some Error Message"); + when(myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo); + + String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" 
+ + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; + HttpGet get = new HttpGet(url); + get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + try (CloseableHttpResponse response = myClient.execute(get)) { + ourLog.info("Response: {}", response.toString()); + + assertEquals(500, response.getStatusLine().getStatusCode()); + assertEquals("Server Error", response.getStatusLine().getReasonPhrase()); + + String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8); + ourLog.info("Response content: {}", responseContent); + assertThat(responseContent, containsString("\"diagnostics\": \"Some Error Message\"")); + } + + } + + @Test + public void testPollForStatus_COMPLETED() throws IOException { + + IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() + .setJobId(A_JOB_ID) + .setStatus(BulkJobStatusEnum.COMPLETE) + .setStatusTime(InstantType.now().getValue()); + jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/111")); + jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/222")); + jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/333")); + when(myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo); + + String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" 
+ + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; + HttpGet get = new HttpGet(url); + get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + try (CloseableHttpResponse response = myClient.execute(get)) { + ourLog.info("Response: {}", response.toString()); + + assertEquals(200, response.getStatusLine().getStatusCode()); + assertEquals("OK", response.getStatusLine().getReasonPhrase()); + assertEquals(Constants.CT_JSON, response.getEntity().getContentType().getValue()); + + String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8); + ourLog.info("Response content: {}", responseContent); + BulkExportResponseJson responseJson = JsonUtil.deserialize(responseContent, BulkExportResponseJson.class); + assertEquals(3, responseJson.getOutput().size()); + assertEquals("Patient", responseJson.getOutput().get(0).getType()); + assertEquals("http://localhost:" + myPort + "/Binary/111", responseJson.getOutput().get(0).getUrl()); + assertEquals("Patient", responseJson.getOutput().get(1).getType()); + assertEquals("http://localhost:" + myPort + "/Binary/222", responseJson.getOutput().get(1).getUrl()); + assertEquals("Patient", responseJson.getOutput().get(2).getType()); + assertEquals("http://localhost:" + myPort + "/Binary/333", responseJson.getOutput().get(2).getUrl()); + } + + } + + @Test + public void testPollForStatus_Gone() throws IOException { + + when(myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(eq(A_JOB_ID))).thenThrow(new ResourceNotFoundException("Unknown job: AAA")); + + String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" 
+ + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; + HttpGet get = new HttpGet(url); + get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); + try (CloseableHttpResponse response = myClient.execute(get)) { + ourLog.info("Response: {}", response.toString()); + String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8); + ourLog.info("Response content: {}", responseContent); + + assertEquals(404, response.getStatusLine().getStatusCode()); + assertEquals(Constants.CT_FHIR_JSON_NEW, response.getEntity().getContentType().getValue().replaceAll(";.*", "").trim()); + assertThat(responseContent, containsString("\"diagnostics\":\"Unknown job: AAA\"")); + } + + } + + +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java new file mode 100644 index 00000000000..064859f1208 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java @@ -0,0 +1,254 @@ +package ca.uhn.fhir.jpa.bulk; + +import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao; +import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao; +import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; +import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; +import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity; +import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; +import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor; +import com.google.common.collect.Sets; +import org.apache.commons.lang3.time.DateUtils; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r4.model.Binary; +import org.hl7.fhir.r4.model.InstantType; +import org.hl7.fhir.r4.model.Observation; +import 
org.hl7.fhir.r4.model.Patient; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.TestPropertySource; + +import java.util.Date; +import java.util.UUID; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.junit.Assert.*; + +@TestPropertySource(properties = { + UnregisterScheduledProcessor.SCHEDULING_DISABLED_EQUALS_TRUE +}) +public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { + + private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImplR4Test.class); + @Autowired + private IBulkExportJobDao myBulkExportJobDao; + @Autowired + private IBulkExportCollectionDao myBulkExportCollectionDao; + @Autowired + private IBulkExportCollectionFileDao myBulkExportCollectionFileDao; + @Autowired + private IBulkDataExportSvc myBulkDataExportSvc; + + + @Test + public void testPurgeExpiredJobs() { + + // Create an expired job + runInTransaction(() -> { + + Binary b = new Binary(); + b.setContent(new byte[]{0, 1, 2, 3}); + String binaryId = myBinaryDao.create(b).getId().toUnqualifiedVersionless().getValue(); + + BulkExportJobEntity job = new BulkExportJobEntity(); + job.setStatus(BulkJobStatusEnum.COMPLETE); + job.setExpiry(DateUtils.addHours(new Date(), -1)); + job.setJobId(UUID.randomUUID().toString()); + job.setRequest("$export"); + myBulkExportJobDao.save(job); + + BulkExportCollectionEntity collection = new BulkExportCollectionEntity(); + job.getCollections().add(collection); + collection.setResourceType("Patient"); + collection.setJob(job); + myBulkExportCollectionDao.save(collection); + + BulkExportCollectionFileEntity file = new BulkExportCollectionFileEntity(); + collection.getFiles().add(file); + file.setCollection(collection); + file.setResource(binaryId); + myBulkExportCollectionFileDao.save(file); + + }); + + // Check that things were created + runInTransaction(() -> { + 
assertEquals(1, myResourceTableDao.count()); + assertEquals(1, myBulkExportJobDao.count()); + assertEquals(1, myBulkExportCollectionDao.count()); + assertEquals(1, myBulkExportCollectionFileDao.count()); + }); + + // Run a purge pass + myBulkDataExportSvc.purgeExpiredFiles(); + + // Check that things were deleted + runInTransaction(() -> { + assertEquals(0, myResourceTableDao.count()); + assertEquals(0, myBulkExportJobDao.count()); + assertEquals(0, myBulkExportCollectionDao.count()); + assertEquals(0, myBulkExportCollectionFileDao.count()); + }); + + } + + @Test + public void testCreateBulkLoad_InvalidOutputFormat() { + try { + myBulkDataExportSvc.submitJob(Constants.CT_FHIR_JSON_NEW, Sets.newHashSet("Patient", "Observation"), null, null); + fail(); + } catch (InvalidRequestException e) { + assertEquals("Invalid output format: application/fhir+json", e.getMessage()); + } + } + + @Test + public void testCreateBulkLoad_NoResourceTypes() { + try { + myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet(), null, null); + fail(); + } catch (InvalidRequestException e) { + assertEquals("No resource types specified", e.getMessage()); + } + } + + @Test + public void testCreateBulkLoad_InvalidResourceTypes() { + try { + myBulkDataExportSvc.submitJob(Constants.CT_FHIR_NDJSON, Sets.newHashSet("Patient", "FOO"), null, null); + fail(); + } catch (InvalidRequestException e) { + assertEquals("Unknown or unsupported resource type: FOO", e.getMessage()); + } + } + + @Test + public void testCreateBulkLoad() { + + // Create some resources to load + createResources(); + + // Create a bulk job + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null); + assertNotNull(jobDetails.getJobId()); + + // Check the status + IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(jobDetails.getJobId()); + assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); 
+ assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient", status.getRequest()); + + // Run a scheduled pass to build the export + myBulkDataExportSvc.buildExportFiles(); + + // Fetch the job again + status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(jobDetails.getJobId()); + assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); + assertEquals(2, status.getFiles().size()); + + // Iterate over the files + for (IBulkDataExportSvc.FileEntry next : status.getFiles()) { + Binary nextBinary = myBinaryDao.read(next.getResourceId()); + assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType()); + String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8); + ourLog.info("Next contents for type {}:\n{}", next.getResourceType(), nextContents); + + if ("Patient".equals(next.getResourceType())) { + assertThat(nextContents, containsString("\"value\":\"PAT0\"}]}\n")); + assertEquals(10, nextContents.split("\n").length); + } else if ("Observation".equals(next.getResourceType())) { + assertThat(nextContents, containsString("\"subject\":{\"reference\":\"Patient/PAT0\"}}\n")); + assertEquals(10, nextContents.split("\n").length); + } else { + fail(next.getResourceType()); + } + + } + } + + @Test + public void testSubmitReusesExisting() { + + // Submit + IBulkDataExportSvc.JobInfo jobDetails1 = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null); + assertNotNull(jobDetails1.getJobId()); + + // Submit again + IBulkDataExportSvc.JobInfo jobDetails2 = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null); + assertNotNull(jobDetails2.getJobId()); + + assertEquals(jobDetails1.getJobId(), jobDetails2.getJobId()); + } + + + @Test + public void testCreateBulkLoad_WithSince() throws InterruptedException { + + // Create some resources to load + createResources(); + + sleepUntilTimeChanges(); + InstantType cutoff = InstantType.now(); 
+ sleepUntilTimeChanges(); + + for (int i = 10; i < 12; i++) { + Patient patient = new Patient(); + patient.setId("PAT" + i); + patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i); + myPatientDao.update(patient).getId().toUnqualifiedVersionless(); + } + + // Create a bulk job + IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), cutoff.getValue(), null); + assertNotNull(jobDetails.getJobId()); + + // Check the status + IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(jobDetails.getJobId()); + assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); + assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_since=" + cutoff.setTimeZoneZulu(true).getValueAsString(), status.getRequest()); + + // Run a scheduled pass to build the export + myBulkDataExportSvc.buildExportFiles(); + + // Fetch the job again + status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(jobDetails.getJobId()); + assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); + assertEquals(1, status.getFiles().size()); + + // Iterate over the files + for (IBulkDataExportSvc.FileEntry next : status.getFiles()) { + Binary nextBinary = myBinaryDao.read(next.getResourceId()); + assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType()); + String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8); + ourLog.info("Next contents for type {}:\n{}", next.getResourceType(), nextContents); + + if ("Patient".equals(next.getResourceType())) { + assertThat(nextContents, containsString("\"id\":\"PAT10\"")); + assertThat(nextContents, containsString("\"id\":\"PAT11\"")); + assertEquals(2, nextContents.split("\n").length); + } else { + fail(next.getResourceType()); + } + + } + } + + private void createResources() { + for (int i = 0; i < 10; i++) { + Patient patient = new Patient(); + patient.setId("PAT" + 
i); + patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i); + IIdType patId = myPatientDao.update(patient).getId().toUnqualifiedVersionless(); + + Observation obs = new Observation(); + obs.setId("OBS" + i); + obs.setStatus(Observation.ObservationStatus.FINAL); + obs.getSubject().setReference(patId.getValue()); + myObservationDao.update(obs); + } + } +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java index 248c7cd7435..34e66c70b49 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java @@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.executor.InterceptorService; +import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test; @@ -389,9 +390,10 @@ public abstract class BaseJpaTest { return IOUtils.toString(bundleRes, Constants.CHARSET_UTF8); } - protected static void purgeDatabase(DaoConfig theDaoConfig, IFhirSystemDao theSystemDao, IResourceReindexingSvc theResourceReindexingSvc, ISearchCoordinatorSvc theSearchCoordinatorSvc, ISearchParamRegistry theSearchParamRegistry) { + protected static void purgeDatabase(DaoConfig theDaoConfig, IFhirSystemDao theSystemDao, IResourceReindexingSvc theResourceReindexingSvc, ISearchCoordinatorSvc theSearchCoordinatorSvc, ISearchParamRegistry theSearchParamRegistry, IBulkDataExportSvc theBulkDataExportSvc) { theSearchCoordinatorSvc.cancelAllActiveSearches(); theResourceReindexingSvc.cancelAndPurgeAllJobs(); + theBulkDataExportSvc.cancelAndPurgeAllJobs(); boolean expungeEnabled = theDaoConfig.isExpungeEnabled(); 
theDaoConfig.setExpungeEnabled(true); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java index 3b638ff8135..e9c6aacb86b 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.dao.dstu2; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestDstu2Config; import ca.uhn.fhir.jpa.dao.*; import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao; @@ -185,6 +186,8 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest { protected IFhirResourceDaoValueSet myValueSetDao; @Autowired protected SubscriptionLoader mySubscriptionLoader; + @Autowired + private IBulkDataExportSvc myBulkDataExportSvc; @Before public void beforeFlushFT() { @@ -202,7 +205,7 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest { @Before @Transactional() public void beforePurgeDatabase() { - purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry); + purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc); } @Before diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java index a21515375f6..76b57fca5e9 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.dao.dstu3; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc; import 
ca.uhn.fhir.jpa.config.TestDstu3Config; import ca.uhn.fhir.jpa.dao.*; import ca.uhn.fhir.jpa.dao.data.*; @@ -256,6 +257,8 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest { protected ITermConceptMapGroupElementTargetDao myTermConceptMapGroupElementTargetDao; @Autowired private JpaValidationSupportChainDstu3 myJpaValidationSupportChainDstu3; + @Autowired + private IBulkDataExportSvc myBulkDataExportSvc; @After() public void afterCleanupDao() { @@ -302,7 +305,7 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest { @Before @Transactional() public void beforePurgeDatabase() { - purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry); + purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc); } @Before diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ReferentialIntegrityTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ReferentialIntegrityTest.java index 8c0391c897e..69c2d9472e6 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ReferentialIntegrityTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3ReferentialIntegrityTest.java @@ -1,8 +1,9 @@ package ca.uhn.fhir.jpa.dao.dstu3; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - +import ca.uhn.fhir.jpa.dao.DaoConfig; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; +import ca.uhn.fhir.util.TestUtil; import org.hl7.fhir.dstu3.model.Organization; import org.hl7.fhir.dstu3.model.Patient; import org.hl7.fhir.dstu3.model.Reference; @@ -11,18 +12,11 @@ import org.junit.After; import org.junit.AfterClass; import org.junit.Test; -import 
ca.uhn.fhir.jpa.dao.DaoConfig; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; -import ca.uhn.fhir.util.TestUtil; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; public class FhirResourceDaoDstu3ReferentialIntegrityTest extends BaseJpaDstu3Test { - @AfterClass - public static void afterClassClearContext() { - TestUtil.clearAllStaticFieldsForUnitTest(); - } - @After public void afterResetConfig() { myDaoConfig.setEnforceReferentialIntegrityOnWrite(new DaoConfig().isEnforceReferentialIntegrityOnWrite()); @@ -46,11 +40,11 @@ public class FhirResourceDaoDstu3ReferentialIntegrityTest extends BaseJpaDstu3Te @Test public void testCreateUnknownReferenceAllow() throws Exception { myDaoConfig.setEnforceReferentialIntegrityOnWrite(false); - + Patient p = new Patient(); p.setManagingOrganization(new Reference("Organization/AAA")); IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless(); - + p = myPatientDao.read(id); assertEquals("Organization/AAA", p.getManagingOrganization().getReference()); @@ -61,11 +55,11 @@ public class FhirResourceDaoDstu3ReferentialIntegrityTest extends BaseJpaDstu3Te Organization o = new Organization(); o.setName("FOO"); IIdType oid = myOrganizationDao.create(o).getId().toUnqualifiedVersionless(); - + Patient p = new Patient(); p.setManagingOrganization(new Reference(oid)); IIdType pid = myPatientDao.create(p).getId().toUnqualifiedVersionless(); - + try { myOrganizationDao.delete(oid); fail(); @@ -81,19 +75,24 @@ public class FhirResourceDaoDstu3ReferentialIntegrityTest extends BaseJpaDstu3Te @Test public void testDeleteAllow() throws Exception { myDaoConfig.setEnforceReferentialIntegrityOnDelete(false); - + Organization o = new Organization(); o.setName("FOO"); IIdType oid = myOrganizationDao.create(o).getId().toUnqualifiedVersionless(); - + Patient p = new Patient(); p.setManagingOrganization(new 
Reference(oid)); IIdType pid = myPatientDao.create(p).getId().toUnqualifiedVersionless(); - + myOrganizationDao.delete(oid); myPatientDao.delete(pid); } - + @AfterClass + public static void afterClassClearContext() { + TestUtil.clearAllStaticFieldsForUnitTest(); + } + + } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchWithLuceneDisabledTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchWithLuceneDisabledTest.java index 00e0ee2efa4..aaa0ca40027 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchWithLuceneDisabledTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchWithLuceneDisabledTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.dao.dstu3; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestDstu3WithoutLuceneConfig; import ca.uhn.fhir.jpa.dao.*; import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3; @@ -149,10 +150,12 @@ public class FhirResourceDaoDstu3SearchWithLuceneDisabledTest extends BaseJpaTes private IValidationSupport myValidationSupport; @Autowired private IResourceReindexingSvc myResourceReindexingSvc; + @Autowired + private IBulkDataExportSvc myBulkDataExportSvc; @Before public void beforePurgeDatabase() { - purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry); + purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc); } @Before diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunnerTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunnerTest.java index ff624a1595f..fdf1e0411e4 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunnerTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunnerTest.java @@ -73,7 +73,7 @@ public class PartitionRunnerTest { myLatch.setExpectedCount(1); myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer); PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(myLatch.awaitExpected()); - assertEquals(EXPUNGE_THREADNAME_1, partitionCall.threadName); + assertEquals("main", partitionCall.threadName); assertEquals(1, partitionCall.size); } @@ -86,7 +86,7 @@ public class PartitionRunnerTest { myLatch.setExpectedCount(1); myPartitionRunner.runInPartitionedThreads(resourceIds, partitionConsumer); PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(myLatch.awaitExpected()); - assertEquals(EXPUNGE_THREADNAME_1, partitionCall.threadName); + assertEquals("main", partitionCall.threadName); assertEquals(2, partitionCall.size); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java index d872b64fc6e..e78572f417b 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java @@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; +import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4Config; import ca.uhn.fhir.jpa.dao.*; import ca.uhn.fhir.jpa.dao.data.*; @@ -315,6 +316,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest { private List mySystemInterceptors; @Autowired private DaoRegistry myDaoRegistry; + @Autowired + private 
IBulkDataExportSvc myBulkDataExportSvc; @After() public void afterCleanupDao() { @@ -376,7 +379,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest { @Before @Transactional() public void beforePurgeDatabase() { - purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry); + purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc); } @Before diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/ConsentEventsDaoR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/ConsentEventsDaoR4Test.java index 83eae406ace..433b41302df 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/ConsentEventsDaoR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/ConsentEventsDaoR4Test.java @@ -13,7 +13,6 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; -import com.google.common.collect.Lists; import org.apache.commons.collections4.ListUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; @@ -32,7 +31,6 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import javax.servlet.ServletException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @@ -54,9 +52,9 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest { private InterceptorService myInterceptorService; private List myObservationIdsOddOnly; private List myObservationIdsEvenOnly; - private List myObservationIdsEvenOnlyBackwards; - private List myObservationIdsBackwards; + private List 
myObservationIdsWithVersions; private List myPatientIdsEvenOnly; + private List myObservationIdsEvenOnlyWithVersions; @After public void after() { @@ -313,9 +311,9 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest { * Note: Each observation in the observation list will appear twice in the actual * returned results because we create it then update it in create50Observations() */ - assertEquals(sort(myObservationIdsEvenOnlyBackwards.subList(0, 3), myObservationIdsEvenOnlyBackwards.subList(0, 3)), sort(returnedIdValues)); assertEquals(1, hitCount.get()); - assertEquals(sort(myObservationIdsBackwards.subList(0, 5), myObservationIdsBackwards.subList(0, 5)), sort(interceptedResourceIds)); + assertEquals(myObservationIdsWithVersions.subList(90, myObservationIdsWithVersions.size()), sort(interceptedResourceIds)); + assertEquals(myObservationIdsEvenOnlyWithVersions.subList(44, 50), sort(returnedIdValues)); } @@ -349,6 +347,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest { private void create50Observations() { myPatientIds = new ArrayList<>(); myObservationIds = new ArrayList<>(); + myObservationIdsWithVersions = new ArrayList<>(); Patient p = new Patient(); p.setActive(true); @@ -370,6 +369,7 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest { obs1.addIdentifier().setSystem("urn:system").setValue("I" + leftPad("" + i, 5, '0')); IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless(); myObservationIds.add(obs1id.toUnqualifiedVersionless().getValue()); + myObservationIdsWithVersions.add(obs1id.toUnqualifiedVersionless().getValue()); obs1.setId(obs1id); if (obs1id.getIdPartAsLong() % 2 == 0) { @@ -378,6 +378,8 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest { obs1.getSubject().setReference(oddPid); } myObservationDao.update(obs1); + myObservationIdsWithVersions.add(obs1id.toUnqualifiedVersionless().getValue()); + } myPatientIdsEvenOnly = @@ -391,10 +393,13 @@ public class 
ConsentEventsDaoR4Test extends BaseJpaR4SystemTest { .stream() .filter(t -> Long.parseLong(t.substring(t.indexOf('/') + 1)) % 2 == 0) .collect(Collectors.toList()); + myObservationIdsEvenOnlyWithVersions = + myObservationIdsWithVersions + .stream() + .filter(t -> Long.parseLong(t.substring(t.indexOf('/') + 1)) % 2 == 0) + .collect(Collectors.toList()); myObservationIdsOddOnly = ListUtils.removeAll(myObservationIds, myObservationIdsEvenOnly); - myObservationIdsBackwards = Lists.reverse(myObservationIds); - myObservationIdsEvenOnlyBackwards = Lists.reverse(myObservationIdsEvenOnly); } static class PreAccessInterceptorCounting implements IAnonymousInterceptor { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CacheWarmingTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CacheWarmingTest.java index 7488bcc4b14..584c813da5e 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CacheWarmingTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CacheWarmingTest.java @@ -78,7 +78,7 @@ public class FhirResourceDaoR4CacheWarmingTest extends BaseJpaR4Test { .setUrl("Patient?name=smith") ); CacheWarmingSvcImpl cacheWarmingSvc = (CacheWarmingSvcImpl) myCacheWarmingSvc; - cacheWarmingSvc.initCacheMap(); + ourLog.info("Have {} tasks", cacheWarmingSvc.initCacheMap().size()); Patient p1 = new Patient(); p1.setId("p1"); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java index 0fe82cc86db..7057059ac32 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithLuceneDisabledTest.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao.r4; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.config.TestR4Config; +import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR4WithoutLuceneConfig; import ca.uhn.fhir.jpa.dao.*; import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc; @@ -37,7 +38,6 @@ import java.util.List; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.when; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(classes = {TestR4WithoutLuceneConfig.class}) @@ -110,11 +110,13 @@ public class FhirResourceDaoR4SearchWithLuceneDisabledTest extends BaseJpaTest { private IFhirSystemDao mySystemDao; @Autowired private IResourceReindexingSvc myResourceReindexingSvc; + @Autowired + private IBulkDataExportSvc myBulkDataExportSvc; @Before @Transactional() public void beforePurgeDatabase() { - purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry); + purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc); } @Before diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java index 8e621e7f504..6e82e9f4887 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4UniqueSearchParamTest.java @@ -696,7 +696,8 @@ public class FhirResourceDaoR4UniqueSearchParamTest extends BaseJpaR4Test { myResourceReindexingSvc.markAllResourcesForReindexing("Observation"); assertEquals(1, 
myResourceReindexingSvc.forceReindexingPass()); - assertEquals(0, myResourceReindexingSvc.forceReindexingPass()); + myResourceReindexingSvc.forceReindexingPass(); + myResourceReindexingSvc.forceReindexingPass(); assertEquals(0, myResourceReindexingSvc.forceReindexingPass()); uniques = myResourceIndexedCompositeStringUniqueDao.findAll(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java index 02061893f58..7a3b3e00ab1 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/SearchParamExtractorR4Test.java @@ -48,6 +48,11 @@ public class SearchParamExtractorR4Test { return getActiveSearchParams(theResourceName).get(theParamName); } + @Override + public void refreshCacheIfNecessary() { + // nothing + } + @Override public Map> getActiveSearchParams() { throw new UnsupportedOperationException(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java index 15c56df0035..0a49e0dc105 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java @@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; +import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.TestR5Config; import ca.uhn.fhir.jpa.dao.*; import ca.uhn.fhir.jpa.dao.data.*; @@ -315,6 +316,8 @@ public abstract class BaseJpaR5Test extends BaseJpaTest { private List mySystemInterceptors; @Autowired private DaoRegistry 
myDaoRegistry; + @Autowired + private IBulkDataExportSvc myBulkDataExportSvc; @After() public void afterCleanupDao() { @@ -376,7 +379,7 @@ public abstract class BaseJpaR5Test extends BaseJpaTest { @Before @Transactional() public void beforePurgeDatabase() { - purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry); + purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc); } @Before diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4Test.java index ae4b221e433..005a4fce51f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4Test.java @@ -4,7 +4,7 @@ import ca.uhn.fhir.jpa.config.BaseConfig; import ca.uhn.fhir.jpa.config.TestR4Config; import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.PreferReturnEnum; +import ca.uhn.fhir.rest.api.PreferHeader; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; @@ -349,7 +349,7 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProvid Patient patient = new Patient(); patient.setActive(true); - IIdType id = ourClient.create().resource(patient).prefer(PreferReturnEnum.REPRESENTATION).execute().getId().toUnqualifiedVersionless(); + IIdType id = ourClient.create().resource(patient).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute().getId().toUnqualifiedVersionless(); DelegatingConsentService consentService = new 
DelegatingConsentService(); myConsentInterceptor = new ConsentInterceptor(consentService, IConsentContextServices.NULL_IMPL); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderOnlySomeResourcesProvidedR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderOnlySomeResourcesProvidedR4Test.java index 142faf093ce..5244c39af22 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderOnlySomeResourcesProvidedR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderOnlySomeResourcesProvidedR4Test.java @@ -2,7 +2,6 @@ package ca.uhn.fhir.jpa.provider.r4; import ca.uhn.fhir.jpa.dao.DaoRegistry; import ca.uhn.fhir.jpa.util.TestUtil; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.r4.model.Practitioner; @@ -10,16 +9,18 @@ import org.junit.AfterClass; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; +import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; import static org.hamcrest.CoreMatchers.containsString; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; @SuppressWarnings("Duplicates") @ContextConfiguration(classes = {ResourceProviderOnlySomeResourcesProvidedR4Test.OnlySomeResourcesProvidedCtxConfig.class}) +@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) public class ResourceProviderOnlySomeResourcesProvidedR4Test extends BaseResourceProviderR4Test { @Test @@ -62,6 +63,13 @@ public class ResourceProviderOnlySomeResourcesProvidedR4Test extends BaseResourc } } + + @PreDestroy + 
public void stop() { + myDaoRegistry.setSupportedResourceTypes(); + } + + } @AfterClass diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java index 44da70e8c45..ee180009174 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java @@ -31,6 +31,7 @@ import com.google.common.collect.Lists; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; +import org.apache.commons.lang3.time.DateUtils; import org.apache.http.NameValuePair; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.*; @@ -491,13 +492,13 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { private void checkParamMissing(String paramName) throws IOException { HttpGet get = new HttpGet(ourServerBase + "/Observation?" 
+ paramName + ":missing=false"); CloseableHttpResponse resp = ourHttpClient.execute(get); - IOUtils.closeQuietly(resp.getEntity().getContent()); + resp.getEntity().getContent().close(); assertEquals(200, resp.getStatusLine().getStatusCode()); } private ArrayList genResourcesOfType(Bundle theRes, Class theClass) { - ArrayList retVal = new ArrayList(); + ArrayList retVal = new ArrayList<>(); for (BundleEntryComponent next : theRes.getEntry()) { if (next.getResource() != null) { if (theClass.isAssignableFrom(next.getResource().getClass())) { @@ -531,14 +532,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { ourLog.info("About to perform search for: {}", theUri); - CloseableHttpResponse response = ourHttpClient.execute(get); - try { + try (CloseableHttpResponse response = ourHttpClient.execute(get)) { String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); ourLog.info(resp); Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, resp); ids = toUnqualifiedIdValues(bundle); - } finally { - IOUtils.closeQuietly(response); } return ids; } @@ -547,14 +545,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { List ids; HttpGet get = new HttpGet(uri); - CloseableHttpResponse response = ourHttpClient.execute(get); - try { + try (CloseableHttpResponse response = ourHttpClient.execute(get)) { String resp = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); ourLog.info(resp); Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, resp); ids = toUnqualifiedVersionlessIdValues(bundle); - } finally { - IOUtils.closeQuietly(response); } return ids; } @@ -589,7 +584,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { String resBody = IOUtils.toString(ResourceProviderR4Test.class.getResource("/r4/document-father.json"), StandardCharsets.UTF_8); resBody = resBody.replace("\"type\": \"document\"", "\"type\": 
\"transaction\""); try { - client.create().resource(resBody).execute().getId(); + client.create().resource(resBody).execute(); fail(); } catch (UnprocessableEntityException e) { assertThat(e.getMessage(), containsString("Unable to store a Bundle resource on this server with a Bundle.type value of: transaction")); @@ -688,7 +683,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { .create() .resource(p) .conditionalByUrl("Patient?identifier=foo|bar") - .prefer(PreferReturnEnum.REPRESENTATION) + .prefer(PreferHeader.PreferReturnEnum.REPRESENTATION) .execute(); assertEquals(id.getIdPart(), outcome.getId().getIdPart()); @@ -721,7 +716,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { assertEquals(200, resp.getStatusLine().getStatusCode()); assertEquals(resource.withVersion("2").getValue(), resp.getFirstHeader("Content-Location").getValue()); } finally { - IOUtils.closeQuietly(resp); + resp.close(); } fromDB = ourClient.read().resource(Binary.class).withId(resource.toVersionless()).execute(); @@ -737,7 +732,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { assertEquals(200, resp.getStatusLine().getStatusCode()); assertEquals(resource.withVersion("3").getValue(), resp.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); } finally { - IOUtils.closeQuietly(resp); + resp.close(); } fromDB = ourClient.read().resource(Binary.class).withId(resource.toVersionless()).execute(); @@ -754,7 +749,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { try { assertEquals(400, resp.getStatusLine().getStatusCode()); } finally { - IOUtils.closeQuietly(resp); + resp.close(); } fromDB = ourClient.read().resource(Binary.class).withId(resource.toVersionless()).execute(); @@ -769,7 +764,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { public void testCreateBundle() throws IOException { String input = 
IOUtils.toString(getClass().getResourceAsStream("/bryn-bundle.json"), StandardCharsets.UTF_8); Validate.notNull(input); - ourClient.create().resource(input).execute().getResource(); + ourClient.create().resource(input).execute(); } @Test @@ -1659,7 +1654,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { b = parser.parseResource(Bundle.class, new InputStreamReader(ResourceProviderR4Test.class.getResourceAsStream("/r4/bug147-bundle.json"))); Bundle resp = ourClient.transaction().withBundle(b).execute(); - List ids = new ArrayList(); + List ids = new ArrayList<>(); for (BundleEntryComponent next : resp.getEntry()) { IdType toAdd = new IdType(next.getResponse().getLocation()).toUnqualifiedVersionless(); ids.add(toAdd); @@ -1673,7 +1668,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { Parameters output = ourClient.operation().onInstance(patientId).named("everything").withNoParameters(Parameters.class).execute(); b = (Bundle) output.getParameter().get(0).getResource(); - ids = new ArrayList(); + ids = new ArrayList<>(); boolean dupes = false; for (BundleEntryComponent next : b.getEntry()) { IdType toAdd = next.getResource().getIdElement().toUnqualifiedVersionless(); @@ -1694,7 +1689,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { Parameters output = ourClient.operation().onInstance(patientId).named("everything").withParameters(input).execute(); b = (Bundle) output.getParameter().get(0).getResource(); - ids = new ArrayList(); + ids = new ArrayList<>(); boolean dupes = false; for (BundleEntryComponent next : b.getEntry()) { IdType toAdd = next.getResource().getIdElement().toUnqualifiedVersionless(); @@ -1760,7 +1755,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { Parameters output = ourClient.operation().onInstance(patientId).named("everything").withNoParameters(Parameters.class).execute(); b = (Bundle) output.getParameter().get(0).getResource(); - List ids 
= new ArrayList(); + List ids = new ArrayList<>(); for (BundleEntryComponent next : b.getEntry()) { IdType toAdd = next.getResource().getIdElement().toUnqualifiedVersionless(); ids.add(toAdd); @@ -1892,7 +1887,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { try { assertEquals(200, response.getStatusLine().getStatusCode()); String output = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); - IOUtils.closeQuietly(response.getEntity().getContent()); + response.getEntity().getContent().close(); ourLog.info(output); List ids = toUnqualifiedVersionlessIds(myFhirCtx.newXmlParser().parseResource(Bundle.class, output)); ourLog.info(ids.toString()); @@ -1907,7 +1902,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { try { assertEquals(200, response.getStatusLine().getStatusCode()); String output = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); - IOUtils.closeQuietly(response.getEntity().getContent()); + response.getEntity().getContent().close(); ourLog.info(output); List ids = toUnqualifiedVersionlessIds(myFhirCtx.newXmlParser().parseResource(Bundle.class, output)); ourLog.info(ids.toString()); @@ -1926,7 +1921,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { // try { // assertEquals(200, response.getStatusLine().getStatusCode()); // String output = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); - // IOUtils.closeQuietly(response.getEntity().getContent()); + // response.getEntity().getContent().close(); // ourLog.info(output); // List ids = toUnqualifiedVersionlessIds(myFhirCtx.newXmlParser().parseResource(Bundle.class, output)); // ourLog.info(ids.toString()); @@ -1940,7 +1935,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { // try { // assertEquals(200, response.getStatusLine().getStatusCode()); // String output = IOUtils.toString(response.getEntity().getContent(), 
StandardCharsets.UTF_8); - // IOUtils.closeQuietly(response.getEntity().getContent()); + // response.getEntity().getContent().close(); // ourLog.info(output); // List ids = toUnqualifiedVersionlessIds(myFhirCtx.newXmlParser().parseResource(Bundle.class, output)); // ourLog.info(ids.toString()); @@ -1964,7 +1959,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { assertEquals(53, inputBundle.getEntry().size()); - Set allIds = new TreeSet(); + Set allIds = new TreeSet<>(); for (BundleEntryComponent nextEntry : inputBundle.getEntry()) { nextEntry.getRequest().setMethod(HTTPVerb.PUT); nextEntry.getRequest().setUrl(nextEntry.getResource().getId()); @@ -1986,7 +1981,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(responseBundle)); - List ids = new ArrayList(); + List ids = new ArrayList<>(); for (BundleEntryComponent nextEntry : responseBundle.getEntry()) { ids.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue()); } @@ -1995,7 +1990,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { assertThat(responseBundle.getEntry().size(), lessThanOrEqualTo(25)); - TreeSet idsSet = new TreeSet(); + TreeSet idsSet = new TreeSet<>(); for (int i = 0; i < responseBundle.getEntry().size(); i++) { for (BundleEntryComponent nextEntry : responseBundle.getEntry()) { idsSet.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue()); @@ -2118,13 +2113,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { HttpPost post = new HttpPost(ourServerBase + "/Patient/" + JpaConstants.OPERATION_VALIDATE); post.setEntity(new StringEntity(input, ContentType.APPLICATION_JSON)); - CloseableHttpResponse resp = ourHttpClient.execute(post); - try { + try (CloseableHttpResponse resp = ourHttpClient.execute(post)) { String respString = 
IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8); ourLog.info(respString); assertEquals(200, resp.getStatusLine().getStatusCode()); - } finally { - IOUtils.closeQuietly(resp); } } @@ -2144,14 +2136,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { HttpPost post = new HttpPost(ourServerBase + "/Patient/$validate"); post.setEntity(new StringEntity(input, ContentType.APPLICATION_JSON)); - CloseableHttpResponse resp = ourHttpClient.execute(post); - try { + try (CloseableHttpResponse resp = ourHttpClient.execute(post)) { String respString = IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8); ourLog.info(respString); assertThat(respString, containsString("Unknown extension http://hl7.org/fhir/ValueSet/v3-ActInvoiceGroupCode")); assertEquals(200, resp.getStatusLine().getStatusCode()); - } finally { - IOUtils.closeQuietly(resp); } } finally { ourRestServer.unregisterInterceptor(interceptor); @@ -2247,15 +2236,12 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { myResourceCountsCache.update(); HttpGet get = new HttpGet(ourServerBase + "/$get-resource-counts"); - CloseableHttpResponse response = ourHttpClient.execute(get); - try { + try (CloseableHttpResponse response = ourHttpClient.execute(get)) { assertEquals(200, response.getStatusLine().getStatusCode()); String output = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); - IOUtils.closeQuietly(response.getEntity().getContent()); + response.getEntity().getContent().close(); ourLog.info(output); assertThat(output, containsString(" resources = new ArrayList(); + for (int i = 0; i < 50; i++) { + Organization org = new Organization(); + org.setName("HELLO"); + resources.add(org); + } + ourClient.transaction().withResources(resources).prettyPrint().encodedXml().execute(); + + /* + * First, make sure that we don't reuse a search if + * it's not marked with an expiry + */ + { + 
myDaoConfig.setReuseCachedSearchResultsForMillis(10L); + Bundle result1 = ourClient + .search() + .forResource("Organization") + .returnBundle(Bundle.class) + .execute(); + final String uuid1 = toSearchUuidFromLinkNext(result1); + sleepOneClick(); + Bundle result2 = ourClient + .search() + .forResource("Organization") + .returnBundle(Bundle.class) + .execute(); + final String uuid2 = toSearchUuidFromLinkNext(result2); + assertNotEquals(uuid1, uuid2); + } + + /* + * Now try one but mark it with an expiry time + * in the future + */ + { + myDaoConfig.setReuseCachedSearchResultsForMillis(1000L); + Bundle result1 = ourClient + .search() + .forResource("Organization") + .returnBundle(Bundle.class) + .execute(); + final String uuid1 = toSearchUuidFromLinkNext(result1); + runInTransaction(() -> { + Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid1).orElseThrow(()->new IllegalStateException()); + search.setExpiryOrNull(DateUtils.addSeconds(new Date(), -2)); + mySearchEntityDao.save(search); + }); + sleepOneClick(); + Bundle result2 = ourClient + .search() + .forResource("Organization") + .returnBundle(Bundle.class) + .execute(); + + // Expiry doesn't affect reusablility + final String uuid2 = toSearchUuidFromLinkNext(result2); + assertEquals(uuid1, uuid2); + + } + } + + @Test public void testSearchReusesResultsDisabled() { List resources = new ArrayList<>(); @@ -4995,13 +5028,13 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { String encoded = myFhirCtx.newJsonParser().encodeResourceToString(p); HttpPut put = new HttpPut(ourServerBase + "/Patient/A"); - put.setEntity(new StringEntity(encoded, "application/fhir+json", "UTF-8")); + put.setEntity(new StringEntity(encoded, ContentType.create("application/fhir+json", "UTF-8"))); CloseableHttpResponse response = ourHttpClient.execute(put); try { assertEquals(201, response.getStatusLine().getStatusCode()); } finally { - IOUtils.closeQuietly(response); + response.close(); } p = new 
Patient(); @@ -5010,13 +5043,13 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { encoded = myFhirCtx.newJsonParser().encodeResourceToString(p); put = new HttpPut(ourServerBase + "/Patient/A"); - put.setEntity(new StringEntity(encoded, "application/fhir+json", "UTF-8")); + put.setEntity(new StringEntity(encoded, ContentType.create("application/fhir+json", "UTF-8"))); response = ourHttpClient.execute(put); try { assertEquals(200, response.getStatusLine().getStatusCode()); } finally { - IOUtils.closeQuietly(response); + response.close(); } } @@ -5100,8 +5133,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { HttpPut post = new HttpPut(ourServerBase + "/Patient/A2"); post.setEntity(new StringEntity(resource, ContentType.create(Constants.CT_FHIR_XML, "UTF-8"))); - CloseableHttpResponse response = ourHttpClient.execute(post); - try { + try (CloseableHttpResponse response = ourHttpClient.execute(post)) { String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8); ourLog.info(responseString); assertEquals(400, response.getStatusLine().getStatusCode()); @@ -5109,8 +5141,6 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { assertEquals( "Can not update resource, resource body must contain an ID element which matches the request URL for update (PUT) operation - Resource body ID of \"333\" does not match URL ID of \"A2\"", oo.getIssue().get(0).getDiagnostics()); - } finally { - response.close(); } } @@ -5119,7 +5149,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { String input = IOUtils.toString(getClass().getResourceAsStream("/dstu3-person.json"), StandardCharsets.UTF_8); try { - MethodOutcome resp = ourClient.update().resource(input).withId("Patient/PERSON1").execute(); + ourClient.update().resource(input).withId("Patient/PERSON1").execute(); } catch (InvalidRequestException e) { assertEquals("", e.getMessage()); } @@ -5139,7 
+5169,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { assertThat(resp, containsString("No resource supplied for $validate operation (resource is required unless mode is "delete")")); assertEquals(400, response.getStatusLine().getStatusCode()); } finally { - IOUtils.closeQuietly(response.getEntity().getContent()); + response.getEntity().getContent().close(); response.close(); } } @@ -5163,7 +5193,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { assertThat(resp, containsString("No resource supplied for $validate operation (resource is required unless mode is "delete")")); assertEquals(400, response.getStatusLine().getStatusCode()); } finally { - IOUtils.closeQuietly(response.getEntity().getContent()); + response.getEntity().getContent().close(); response.close(); } } @@ -5189,7 +5219,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { assertThat(resp, stringContainsInOrder(">ERROR<", "[Patient.contact[0]]", "
SHALL at least contain a contact's details or a reference to an organization", ""));
 		} finally {
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			response.close();
 		}
 	}
@@ -5217,7 +5247,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			ourLog.info(resp);
 			assertEquals(200, response.getStatusLine().getStatusCode());
 		} finally {
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			response.close();
 		}
 	}
@@ -5241,7 +5271,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			assertEquals(412, response.getStatusLine().getStatusCode());
 			assertThat(resp, containsString("SHALL at least contain a contact's details or a reference to an organization"));
 		} finally {
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			response.close();
 		}
 	}
@@ -5270,7 +5300,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			ourLog.info(resp);
 			assertEquals(200, response.getStatusLine().getStatusCode());
 		} finally {
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			response.close();
 		}
 	}
@@ -5298,7 +5328,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			assertThat(resp, not(containsString("warn")));
 			assertThat(resp, not(containsString("error")));
 		} finally {
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			response.close();
 		}
 	}
@@ -5325,7 +5355,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 				stringContainsInOrder("", "", "", "",
 					""));
 		} finally {
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			response.close();
 		}
 	}
@@ -5358,7 +5388,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 			assertThat(resp, containsString(""));
 			assertThat(resp, containsString(""));
 		} finally {
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			response.close();
 		}
 
@@ -5378,7 +5408,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
 				""));
 			//@formatter:on
 		} finally {
-			IOUtils.closeQuietly(response.getEntity().getContent());
+			response.getEntity().getContent().close();
 			response.close();
 		}
 
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/StaleSearchDeletingSvcR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/StaleSearchDeletingSvcR4Test.java
index f2429ccf050..f20f42fe527 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/StaleSearchDeletingSvcR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/StaleSearchDeletingSvcR4Test.java
@@ -9,6 +9,7 @@ import ca.uhn.fhir.jpa.entity.SearchResult;
 import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
+import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
 import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
 import ca.uhn.fhir.rest.gclient.IClientExecutable;
 import ca.uhn.fhir.rest.gclient.IQuery;
@@ -28,6 +29,7 @@ import org.springframework.test.util.AopTestUtils;
 import java.util.Date;
 import java.util.UUID;
 
+import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
 import static org.hamcrest.Matchers.*;
 import static org.junit.Assert.*;
 
@@ -68,13 +70,11 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
 			myPatientDao.create(pt1, mySrd).getId().toUnqualifiedVersionless();
 		}
 
-		//@formatter:off
 		IClientExecutable, Bundle> search = ourClient
 			.search()
 			.forResource(Patient.class)
 			.where(Patient.NAME.matches().value("Everything"))
 			.returnBundle(Bundle.class);
-		//@formatter:on
 
 		Bundle resp1 = search.execute();
 
@@ -172,6 +172,40 @@ public class StaleSearchDeletingSvcR4Test extends BaseResourceProviderR4Test {
 
 	}
 
+	/**
+	 * A search whose expiry timestamp is still in the future must survive a
+	 * stale-search purge pass even though its created/last-returned timestamps
+	 * are ancient; once the expiry passes, the next purge removes it.
+	 */
+	@Test
+	public void testDontDeleteSearchBeforeExpiry() {
+		// Limit the per-pass deletion batch size (unit-test hook)
+		DatabaseSearchCacheSvcImpl.setMaximumResultsToDeleteForUnitTest(10);
+
+		runInTransaction(() -> {
+			Search search = new Search();
+
+			// Expires in one second, so it should not be deleted right away,
+			// but it should be deleted if we try again after one second...
+			search.setExpiryOrNull(DateUtils.addMilliseconds(new Date(), 1000));
+
+			// Created / last-returned far in the past: stale by age, so only
+			// the explicit expiry above should be keeping this search alive
+			search.setStatus(SearchStatusEnum.FINISHED);
+			search.setUuid(UUID.randomUUID().toString());
+			search.setCreated(DateUtils.addDays(new Date(), -10000));
+			search.setSearchType(SearchTypeEnum.SEARCH);
+			search.setResourceType("Patient");
+			search.setSearchLastReturned(DateUtils.addDays(new Date(), -10000));
+			search = mySearchEntityDao.save(search);
+
+		});
+
+		// Should not delete right now
+		assertEquals(1, mySearchEntityDao.count());
+		myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
+		assertEquals(1, mySearchEntityDao.count());
+
+		// Wait until the 1000ms expiry has definitely elapsed
+		sleepAtLeast(1100);
+
+		// Now it's time to delete
+		myStaleSearchDeletingSvc.pollForStaleSearchesAndDeleteThem();
+		assertEquals(0, mySearchEntityDao.count());
+
+	}
+
 	@AfterClass
 	public static void afterClassClearContext() {
 		TestUtil.clearAllStaticFieldsForUnitTest();
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/sched/SchedulerServiceImplTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/sched/SchedulerServiceImplTest.java
new file mode 100644
index 00000000000..8409ea0c049
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/sched/SchedulerServiceImplTest.java
@@ -0,0 +1,207 @@
+package ca.uhn.fhir.jpa.sched;
+
+import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
+import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.quartz.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeanUtils;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.data.util.ProxyUtils;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.test.util.AopTestUtils;
+
+import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
+import static org.hamcrest.Matchers.*;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+
+/**
+ * Tests for {@code SchedulerServiceImpl}: verifies that fixed-delay jobs fire
+ * repeatedly, that the service keeps scheduling after a stop/start cycle, that
+ * a {@code @DisallowConcurrentExecution} job does not run concurrently with
+ * itself, and that a {@link FireAtIntervalJob} subclass fires at its interval.
+ * Counts are asserted as loose ranges since firing is timing-dependent.
+ */
+@ContextConfiguration(classes = SchedulerServiceImplTest.TestConfiguration.class)
+@RunWith(SpringJUnit4ClassRunner.class)
+public class SchedulerServiceImplTest {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(SchedulerServiceImplTest.class);
+
+	@Autowired
+	private ISchedulerService mySvc;
+	// When > 0, the counting jobs below sleep this long per execution,
+	// letting tests simulate a long-running job
+	private static long ourTaskDelay;
+
+	@Before
+	public void before() {
+		ourTaskDelay = 0;
+	}
+
+	@Test
+	public void testScheduleTask() {
+
+		ScheduledJobDefinition def = new ScheduledJobDefinition()
+			.setId(CountingJob.class.getName())
+			.setJobClass(CountingJob.class);
+
+		// 100ms fixed delay: expect roughly 10 firings in the 1s sleep below
+		mySvc.scheduleFixedDelay(100, false, def);
+
+		sleepAtLeast(1000);
+
+		ourLog.info("Fired {} times", CountingJob.ourCount);
+
+		assertThat(CountingJob.ourCount, greaterThan(3));
+		assertThat(CountingJob.ourCount, lessThan(20));
+	}
+
+	@Test
+	public void testStopAndStartService() throws SchedulerException {
+
+		ScheduledJobDefinition def = new ScheduledJobDefinition()
+			.setId(CountingJob.class.getName())
+			.setJobClass(CountingJob.class);
+
+		// Unwrap the Spring proxy so we can drive the service lifecycle directly
+		SchedulerServiceImpl svc = AopTestUtils.getTargetObject(mySvc);
+		svc.stop();
+		svc.start();
+		// Re-fire the context-started hook so scheduling resumes
+		// (null event appears to be tolerated here -- TODO confirm)
+		svc.contextStarted(null);
+
+		mySvc.scheduleFixedDelay(100, false, def);
+
+		sleepAtLeast(1000);
+
+		ourLog.info("Fired {} times", CountingJob.ourCount);
+
+		// Same expected range as testScheduleTask: restart must not break scheduling
+		assertThat(CountingJob.ourCount, greaterThan(3));
+		assertThat(CountingJob.ourCount, lessThan(20));
+	}
+
+	@Test
+	public void testScheduleTaskLongRunningDoesntRunConcurrently() {
+
+		ScheduledJobDefinition def = new ScheduledJobDefinition()
+			.setId(CountingJob.class.getName())
+			.setJobClass(CountingJob.class);
+		// Each execution sleeps 500ms; with @DisallowConcurrentExecution the
+		// 100ms trigger cannot stack runs, so at most ~2 firings fit in 1s
+		ourTaskDelay = 500;
+
+		mySvc.scheduleFixedDelay(100, false, def);
+
+		sleepAtLeast(1000);
+
+		ourLog.info("Fired {} times", CountingJob.ourCount);
+
+		assertThat(CountingJob.ourCount, greaterThanOrEqualTo(1));
+		assertThat(CountingJob.ourCount, lessThan(5));
+	}
+
+	@Test
+	public void testIntervalJob() {
+
+		ScheduledJobDefinition def = new ScheduledJobDefinition()
+			.setId(CountingIntervalJob.class.getName())
+			.setJobClass(CountingIntervalJob.class);
+		ourTaskDelay = 500;
+
+		// Triggered every 100ms, but the job's own 500ms interval plus the
+		// 500ms task delay should bound executions to a handful over 2s
+		mySvc.scheduleFixedDelay(100, false, def);
+
+		sleepAtLeast(2000);
+
+		ourLog.info("Fired {} times", CountingIntervalJob.ourCount);
+
+		assertThat(CountingIntervalJob.ourCount, greaterThanOrEqualTo(2));
+		assertThat(CountingIntervalJob.ourCount, lessThan(6));
+	}
+
+	@After
+	public void after() throws SchedulerException {
+		// Reset static counters and clear all jobs so tests stay independent
+		CountingJob.ourCount = 0;
+		CountingIntervalJob.ourCount = 0;
+		mySvc.purgeAllScheduledJobsForUnitTest();
+	}
+
+	/**
+	 * Quartz job that counts its executions, and asserts that Spring autowiring
+	 * (the {@code stringBean} qualifier) and {@link ApplicationContextAware}
+	 * population both worked. {@code @DisallowConcurrentExecution} prevents
+	 * overlapping runs of this job.
+	 */
+	@DisallowConcurrentExecution
+	public static class CountingJob implements Job, ApplicationContextAware {
+
+		private static int ourCount;
+
+		@Autowired
+		@Qualifier("stringBean")
+		private String myStringBean;
+		private ApplicationContext myAppCtx;
+
+		@Override
+		public void execute(JobExecutionContext theContext) {
+			// Fail loudly if the job factory did not autowire this instance
+			if (!"String beans are good.".equals(myStringBean)) {
+				fail("Did not autowire stringBean correctly, found: " + myStringBean);
+			}
+			if (myAppCtx == null) {
+				fail("Did not populate appctx");
+			}
+			if (ourTaskDelay > 0) {
+				ourLog.info("Job has fired, going to sleep for {}ms", ourTaskDelay);
+				sleepAtLeast(ourTaskDelay);
+				ourLog.info("Done sleeping");
+			} else {
+				ourLog.info("Job has fired...");
+			}
+			ourCount++;
+		}
+
+		@Override
+		public void setApplicationContext(ApplicationContext theAppCtx) throws BeansException {
+			myAppCtx = theAppCtx;
+		}
+	}
+
+
+	/**
+	 * {@link FireAtIntervalJob} variant (500ms interval) that counts its
+	 * executions; each run sleeps for {@code ourTaskDelay} ms.
+	 */
+	@DisallowConcurrentExecution
+	@PersistJobDataAfterExecution
+	public static class CountingIntervalJob extends FireAtIntervalJob {
+
+		private static int ourCount;
+
+		@Autowired
+		@Qualifier("stringBean")
+		private String myStringBean;
+		private ApplicationContext myAppCtx;
+
+		public CountingIntervalJob() {
+			super(500);
+		}
+
+		@Override
+		public void doExecute(JobExecutionContext theContext) {
+				ourLog.info("Job has fired, going to sleep for {}ms", ourTaskDelay);
+				sleepAtLeast(ourTaskDelay);
+			ourCount++;
+		}
+
+	}
+
+
+	/**
+	 * Minimal Spring context: the scheduler under test, a marker String bean
+	 * used to verify job autowiring, and the job factory that performs that
+	 * autowiring.
+	 */
+	@Configuration
+	public static class TestConfiguration {
+
+		@Bean
+		public ISchedulerService schedulerService() {
+			return new SchedulerServiceImpl();
+		}
+
+		@Bean
+		public String stringBean() {
+			return "String beans are good.";
+		}
+
+		@Bean
+		public AutowiringSpringBeanJobFactory springBeanJobFactory() {
+			return new AutowiringSpringBeanJobFactory();
+		}
+
+	}
+}
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionTestUtil.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionTestUtil.java
index 55378ac0fc6..cac0a7fd5ac 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionTestUtil.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionTestUtil.java
@@ -80,7 +80,4 @@ public class SubscriptionTestUtil {
 		subscriber.setEmailSender(myEmailSender);
 	}
 
-	public IEmailSender getEmailSender() {
-		return myEmailSender;
-	}
 }
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/email/EmailSubscriptionDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/email/EmailSubscriptionDstu2Test.java
index 86581c8194a..a83d6d87533 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/email/EmailSubscriptionDstu2Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/email/EmailSubscriptionDstu2Test.java
@@ -39,6 +39,7 @@ public class EmailSubscriptionDstu2Test extends BaseResourceProviderDstu2Test {
 	@Autowired
 	private SubscriptionTestUtil mySubscriptionTestUtil;
 
+	@Override
 	@After
 	public void after() throws Exception {
 		ourLog.info("** AFTER **");
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/module/matcher/InMemorySubscriptionMatcherR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/module/matcher/InMemorySubscriptionMatcherR4Test.java
index d3626e7d691..b3f0d8c839c 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/module/matcher/InMemorySubscriptionMatcherR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/module/matcher/InMemorySubscriptionMatcherR4Test.java
@@ -408,6 +408,19 @@ public class InMemorySubscriptionMatcherR4Test {
 		}
 	}
 
+	/**
+	 * An Observation whose subject references Patient/123 should match an
+	 * in-memory search on the 'patient' parameter ({@code Observation.SP_PATIENT}),
+	 * which targets that same subject reference.
+	 */
+	@Test
+	public void testReferenceAlias() {
+		Observation obs = new Observation();
+		obs.setId("Observation/123");
+		obs.getSubject().setReference("Patient/123");
+
+		SearchParameterMap params;
+
+		params = new SearchParameterMap();
+		params.add(Observation.SP_PATIENT, new ReferenceParam("Patient/123"));
+		assertMatched(obs, params);
+	}
+
 	@Test
 	public void testSearchResourceReferenceOnlyCorrectPath() {
 		Organization org = new Organization();
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/SubscriptionTriggeringDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/SubscriptionTriggeringDstu3Test.java
index e571c931eb9..a0c1684a7b4 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/SubscriptionTriggeringDstu3Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/resthook/SubscriptionTriggeringDstu3Test.java
@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.subscription.resthook;
 
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
 import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
 import ca.uhn.fhir.jpa.provider.dstu3.BaseResourceProviderDstu3Test;
 import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil;
@@ -55,6 +56,10 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
 
 	@Autowired
 	private SubscriptionTestUtil mySubscriptionTestUtil;
+	@Autowired
+	private SubscriptionTriggeringSvcImpl mySubscriptionTriggeringSvc;
+	@Autowired
+	private ISchedulerService mySchedulerService;
 
 	@After
 	public void afterUnregisterRestHookListener() {
@@ -80,9 +85,6 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
 		myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
 	}
 
-	@Autowired
-	private SubscriptionTriggeringSvcImpl mySubscriptionTriggeringSvc;
-
 	@Before
 	public void beforeRegisterRestHookListener() {
 		mySubscriptionTestUtil.registerRestHookInterceptor();
@@ -98,6 +100,8 @@ public class SubscriptionTriggeringDstu3Test extends BaseResourceProviderDstu3Te
 		ourCreatedPatients.clear();
 		ourUpdatedPatients.clear();
 		ourContentTypes.clear();
+
+		mySchedulerService.logStatus();
 	}
 
 	private Subscription createSubscription(String theCriteria, String thePayload, String theEndpoint) throws InterruptedException {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/SingleItemLoadingCacheTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/ResourceCountCacheTest.java
similarity index 55%
rename from hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/SingleItemLoadingCacheTest.java
rename to hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/ResourceCountCacheTest.java
index 47b62570f38..d18606d5458 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/SingleItemLoadingCacheTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/ResourceCountCacheTest.java
@@ -1,6 +1,5 @@
 package ca.uhn.fhir.jpa.util;
 
-import org.hl7.fhir.dstu3.model.CapabilityStatement;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -8,29 +7,31 @@ import org.junit.runner.RunWith;
 import org.mockito.Mock;
 import org.mockito.junit.MockitoJUnitRunner;
 
+import java.util.HashMap;
+import java.util.Map;
 import java.util.concurrent.Callable;
-import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
 
 import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.when;
 
 @RunWith(MockitoJUnitRunner.class)
-public class SingleItemLoadingCacheTest {
+public class ResourceCountCacheTest {
 
 	@Mock
-	private Callable myFetcher;
+	private Callable> myFetcher;
 
 	@After
 	public void after() {
-		SingleItemLoadingCache.setNowForUnitTest(null);
+		ResourceCountCache.setNowForUnitTest(null);
 	}
 
 	@Before
 	public void before() throws Exception {
-		AtomicInteger id = new AtomicInteger();
+		AtomicLong id = new AtomicLong();
 		when(myFetcher.call()).thenAnswer(t->{
-			CapabilityStatement retVal = new CapabilityStatement();
-			retVal.setId("" + id.incrementAndGet());
+			Map retVal = new HashMap<>();
+			retVal.put("A", id.incrementAndGet());
 			return retVal;
 		});
 	}
@@ -38,36 +39,36 @@ public class SingleItemLoadingCacheTest {
 	@Test
 	public void testCache() {
 		long start = System.currentTimeMillis();
-		SingleItemLoadingCache.setNowForUnitTest(start);
+		ResourceCountCache.setNowForUnitTest(start);
 
 		// Cache is initialized on startup
-		SingleItemLoadingCache cache = new SingleItemLoadingCache<>(myFetcher);
+		ResourceCountCache cache = new ResourceCountCache(myFetcher);
 		cache.setCacheMillis(500);
 		assertEquals(null, cache.get());
 
 		// Not time to update yet
 		cache.update();
-		assertEquals("1", cache.get().getId());
+		assertEquals(Long.valueOf(1), cache.get().get("A"));
 
 		// Wait a bit, still not time to update
-		SingleItemLoadingCache.setNowForUnitTest(start + 400);
+		ResourceCountCache.setNowForUnitTest(start + 400);
 		cache.update();
-		assertEquals("1", cache.get().getId());
+		assertEquals(Long.valueOf(1), cache.get().get("A"));
 
 		// Wait a bit more and the cache is expired
-		SingleItemLoadingCache.setNowForUnitTest(start + 800);
+		ResourceCountCache.setNowForUnitTest(start + 800);
 		cache.update();
-		assertEquals("2", cache.get().getId());
+		assertEquals(Long.valueOf(2), cache.get().get("A"));
 
 	}
 
 	@Test
 	public void testCacheWithLoadingDisabled() {
 		long start = System.currentTimeMillis();
-		SingleItemLoadingCache.setNowForUnitTest(start);
+		ResourceCountCache.setNowForUnitTest(start);
 
 		// Cache of 0 means "never load"
-		SingleItemLoadingCache cache = new SingleItemLoadingCache<>(myFetcher);
+		ResourceCountCache cache = new ResourceCountCache(myFetcher);
 		cache.setCacheMillis(0);
 
 		/*
@@ -79,11 +80,11 @@ public class SingleItemLoadingCacheTest {
 		cache.update();
 		assertEquals(null, cache.get());
 
-		SingleItemLoadingCache.setNowForUnitTest(start + 400);
+		ResourceCountCache.setNowForUnitTest(start + 400);
 		cache.update();
 		assertEquals(null, cache.get());
 
-		SingleItemLoadingCache.setNowForUnitTest(start + 80000);
+		ResourceCountCache.setNowForUnitTest(start + 80000);
 		cache.update();
 		assertEquals(null, cache.get());
 
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index f45e4bd1eb0..8ed677d751c 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -109,6 +109,16 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
 			.addIndex("IDX_VS_CONCEPT_ORDER")
 			.unique(true)
 			.withColumns("VALUESET_PID", "VALUESET_ORDER");
+
+
+		// Account for RESTYPE_LEN column increasing from 30 to 35
+		version.onTable("HFJ_RESOURCE").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
+		version.onTable("HFJ_HISTORY_TAG").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
+		version.onTable("HFJ_RES_LINK").modifyColumn("SOURCE_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
+		version.onTable("HFJ_RES_LINK").modifyColumn("TARGET_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
+		version.onTable("HFJ_RES_TAG").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 35);
+
+
 	}
 
 	protected void init400() {
diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml
index 8ae1cc7e7f8..d05f0d7e70f 100644
--- a/hapi-fhir-jpaserver-model/pom.xml
+++ b/hapi-fhir-jpaserver-model/pom.xml
@@ -112,6 +112,11 @@
 			commons-collections4
 		
 
+		
+			org.quartz-scheduler
+			quartz
+		
+
 		
 		
 			javax.annotation
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
index ff61a5849b0..898a59c2903 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
@@ -47,7 +47,7 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
 	@Index(name = "IDX_INDEXSTATUS", columnList = "SP_INDEX_STATUS")
 })
 public class ResourceTable extends BaseHasResource implements Serializable {
-	static final int RESTYPE_LEN = 30;
+	public static final int RESTYPE_LEN = 35;
 	private static final int MAX_LANGUAGE_LENGTH = 20;
 	private static final int MAX_PROFILE_LENGTH = 200;
 	private static final long serialVersionUID = 1L;
@@ -199,7 +199,7 @@ public class ResourceTable extends BaseHasResource implements Serializable {
 	@OneToMany(mappedBy = "myTargetResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
 	@OptimisticLock(excluded = true)
 	private Collection myResourceLinksAsTarget;
-	@Column(name = "RES_TYPE", length = RESTYPE_LEN)
+	@Column(name = "RES_TYPE", length = RESTYPE_LEN, nullable = false)
 	@Field
 	@OptimisticLock(excluded = true)
 	private String myResourceType;
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/FireAtIntervalJob.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/FireAtIntervalJob.java
new file mode 100644
index 00000000000..dd085302555
--- /dev/null
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/FireAtIntervalJob.java
@@ -0,0 +1,45 @@
+package ca.uhn.fhir.jpa.model.sched;
+
+import org.quartz.DisallowConcurrentExecution;
+import org.quartz.Job;
+import org.quartz.JobExecutionContext;
+import org.quartz.PersistJobDataAfterExecution;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@DisallowConcurrentExecution
+@PersistJobDataAfterExecution
+public abstract class FireAtIntervalJob implements Job {
+
+	public static final String NEXT_EXECUTION_TIME = "NEXT_EXECUTION_TIME";
+	private static final Logger ourLog = LoggerFactory.getLogger(FireAtIntervalJob.class);
+	private final long myMillisBetweenExecutions;
+
+	public FireAtIntervalJob(long theMillisBetweenExecutions) {
+		myMillisBetweenExecutions = theMillisBetweenExecutions;
+	}
+
+	@Override
+	public final void execute(JobExecutionContext theContext) {
+		Long nextExecution = (Long) theContext.getJobDetail().getJobDataMap().get(NEXT_EXECUTION_TIME);
+
+		if (nextExecution != null) {
+			long cutoff = System.currentTimeMillis();
+			if (nextExecution >= cutoff) {
+				return;
+			}
+		}
+
+		try {
+			doExecute(theContext);
+		} catch (Throwable t) {
+			ourLog.error("Job threw uncaught exception", t);
+		} finally {
+			long newNextExecution = System.currentTimeMillis() + myMillisBetweenExecutions;
+			theContext.getJobDetail().getJobDataMap().put(NEXT_EXECUTION_TIME, newNextExecution);
+		}
+	}
+
+	protected abstract void doExecute(JobExecutionContext theContext);
+
+}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/ISchedulerService.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/ISchedulerService.java
new file mode 100644
index 00000000000..f7ee8f8d99a
--- /dev/null
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/ISchedulerService.java
@@ -0,0 +1,21 @@
+package ca.uhn.fhir.jpa.model.sched;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.quartz.SchedulerException;
+
+public interface ISchedulerService {
+
+	@VisibleForTesting
+	void purgeAllScheduledJobsForUnitTest() throws SchedulerException;
+
+	void logStatus();
+
+	/**
+	 * @param theIntervalMillis How many milliseconds between passes should this job run
+	 * @param theClusteredTask  If true, only one instance of this task will fire across the whole cluster (when running in a clustered environment). If false, or if not running in a clustered environment, this task will execute locally (and should execute on all nodes of the cluster)
+	 * @param theJobDefinition  The Job to fire
+	 */
+	void scheduleFixedDelay(long theIntervalMillis, boolean theClusteredTask, ScheduledJobDefinition theJobDefinition);
+
+	boolean isStopping();
+}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/ScheduledJobDefinition.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/ScheduledJobDefinition.java
new file mode 100644
index 00000000000..f1ed0defaa7
--- /dev/null
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/sched/ScheduledJobDefinition.java
@@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.model.sched;

import org.apache.commons.lang3.Validate;
import org.quartz.Job;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Describes a job to be registered with the scheduler: the Quartz {@link Job}
 * class to instantiate, a unique ID for the job, and an optional set of string
 * properties that will be made available to the job through its job data map.
 */
public class ScheduledJobDefinition {

	// NOTE(review): generics restored from usage (addJobData is String->String and
	// job classes are passed via setJobClass elsewhere) — the extracted source had
	// raw types; confirm against the original file.
	private Class<? extends Job> myJobClass;
	private String myId;
	private Map<String, String> myJobData;

	/**
	 * @return an unmodifiable view of the job data properties (never null). The
	 * view reflects properties added after this call, matching the original
	 * live-view semantics.
	 */
	public Map<String, String> getJobData() {
		if (myJobData == null) {
			// Already immutable, no need to wrap again
			return Collections.emptyMap();
		}
		return Collections.unmodifiableMap(myJobData);
	}

	public Class<? extends Job> getJobClass() {
		return myJobClass;
	}

	public ScheduledJobDefinition setJobClass(Class<? extends Job> theJobClass) {
		myJobClass = theJobClass;
		return this;
	}

	public String getId() {
		return myId;
	}

	public ScheduledJobDefinition setId(String theId) {
		myId = theId;
		return this;
	}

	/**
	 * Adds a property to be passed to the job via its job data map.
	 *
	 * @param thePropertyName  must not be blank, and must not already be present
	 * @param thePropertyValue the value to store
	 * @throws IllegalArgumentException if the name is blank or already set
	 */
	public void addJobData(String thePropertyName, String thePropertyValue) {
		Validate.notBlank(thePropertyName, "thePropertyName must not be blank");
		if (myJobData == null) {
			myJobData = new HashMap<>();
		}
		Validate.isTrue(!myJobData.containsKey(thePropertyName), "Property %s is already set", thePropertyName);
		myJobData.put(thePropertyName, thePropertyValue);
	}
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java
index a85e709bd36..e34a6624c0a 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java
@@ -174,6 +174,16 @@ public class JpaConstants {
 	 */
 	public static final String OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM = "$upload-external-code-system";
 
+	/**
+	 * Operation name for the "$export" operation
+	 */
+	public static final String OPERATION_EXPORT = "$export";
+
+	/**
+	 * Operation name for the "$export-poll-status" operation
+	 */
+	public static final String OPERATION_EXPORT_POLL_STATUS = "$export-poll-status";
+
 	/**
 	 * 

* This extension should be of type string and should be @@ -238,5 +248,28 @@ public class JpaConstants { */ public static final String EXT_META_SOURCE = "http://hapifhir.io/fhir/StructureDefinition/resource-meta-source"; + /** + * Parameter for the $export operation + */ + public static final String PARAM_EXPORT_POLL_STATUS_JOB_ID = "_jobId"; + /** + * Parameter for the $export operation + */ + public static final String PARAM_EXPORT_OUTPUT_FORMAT = "_outputFormat"; + + /** + * Parameter for the $export operation + */ + public static final String PARAM_EXPORT_TYPE = "_type"; + + /** + * Parameter for the $export operation + */ + public static final String PARAM_EXPORT_SINCE = "_since"; + + /** + * Parameter for the $export operation + */ + public static final String PARAM_EXPORT_TYPE_FILTER = "_typeFilter"; } diff --git a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceTableTest.java b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceTableTest.java new file mode 100644 index 00000000000..28a5c1049e0 --- /dev/null +++ b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceTableTest.java @@ -0,0 +1,20 @@ +package ca.uhn.fhir.jpa.model.entity; + +import ca.uhn.fhir.context.FhirContext; +import org.junit.Test; + +import static org.junit.Assert.*; + +public class ResourceTableTest { + + @Test + public void testResourceLength() { + for (String nextName : FhirContext.forR4().getResourceNames()) { + if (nextName.length() > ResourceTable.RESTYPE_LEN) { + fail("Name " + nextName + " length of " + nextName.length() + " is > " + ResourceTable.RESTYPE_LEN); + } + } + } + + +} diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 8fd32350fa2..bbe90fcb671 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -100,8 +100,13 @@ org.springframework.retry spring-retry - + + org.quartz-scheduler + quartz + + + 
ch.qos.logback logback-classic diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/SearchParameterMap.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/SearchParameterMap.java index 7d91f3dbc23..cecd579760b 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/SearchParameterMap.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/SearchParameterMap.java @@ -59,6 +59,7 @@ public class SearchParameterMap implements Serializable { private SortSpec mySort; private SummaryEnum mySummaryMode; private SearchTotalModeEnum mySearchTotalMode; + private Long myQueryCacheExpiryTime; /** * Constructor @@ -74,6 +75,14 @@ public class SearchParameterMap implements Serializable { add(theName, theParam); } + public Long getQueryCacheExpiryTime() { + return myQueryCacheExpiryTime; + } + + public void setQueryCacheExpiryTime(Long theQueryCacheExpiryTime) { + myQueryCacheExpiryTime = theQueryCacheExpiryTime; + } + public SummaryEnum getSummaryMode() { return mySummaryMode; } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/BaseSearchParamRegistry.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/BaseSearchParamRegistry.java index f0a9b3bac6d..e6669c6f49e 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/BaseSearchParamRegistry.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/BaseSearchParamRegistry.java @@ -27,6 +27,8 @@ import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.jpa.model.entity.ModelConfig; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import 
ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -40,10 +42,11 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateUtils; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.quartz.Job; +import org.quartz.JobExecutionContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.scheduling.annotation.Scheduled; import javax.annotation.PostConstruct; import java.util.*; @@ -63,6 +66,8 @@ public abstract class BaseSearchParamRegistry implemen private ISearchParamProvider mySearchParamProvider; @Autowired private FhirContext myFhirContext; + @Autowired + private ISchedulerService mySchedulerService; private Map> myBuiltInSearchParams; private volatile Map> myActiveUniqueSearchParams = Collections.emptyMap(); @@ -345,7 +350,7 @@ public abstract class BaseSearchParamRegistry implemen } int refreshCacheWithRetry() { - Retrier refreshCacheRetrier = new Retrier(() -> { + Retrier refreshCacheRetrier = new Retrier<>(() -> { synchronized (BaseSearchParamRegistry.this) { return mySearchParamProvider.refreshCache(this, REFRESH_INTERVAL); } @@ -353,11 +358,15 @@ public abstract class BaseSearchParamRegistry implemen return refreshCacheRetrier.runWithRetry(); } - @Scheduled(fixedDelay = 10 * DateUtils.MILLIS_PER_SECOND) - public void refreshCacheOnSchedule() { - refreshCacheIfNecessary(); + @PostConstruct + public void registerScheduledJob() { + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(BaseSearchParamRegistry.class.getName()); + jobDetail.setJobClass(SubmitJob.class); + mySchedulerService.scheduleFixedDelay(10 * DateUtils.MILLIS_PER_SECOND, false, jobDetail); } + @Override public void refreshCacheIfNecessary() { if 
(myActiveSearchParams == null || System.currentTimeMillis() - REFRESH_INTERVAL > myLastRefresh) { @@ -371,6 +380,16 @@ public abstract class BaseSearchParamRegistry implemen return Collections.unmodifiableMap(myActiveSearchParams); } + public static class SubmitJob implements Job { + @Autowired + private ISearchParamRegistry myTarget; + + @Override + public void execute(JobExecutionContext theContext) { + myTarget.refreshCacheIfNecessary(); + } + } + public static Map> createBuiltInSearchParamMap(FhirContext theFhirContext) { Map> resourceNameToSearchParams = new HashMap<>(); diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistry.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistry.java index 38c54a3095a..c927186ffda 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistry.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistry.java @@ -41,6 +41,8 @@ public interface ISearchParamRegistry { */ RuntimeSearchParam getActiveSearchParam(String theResourceName, String theParamName); + void refreshCacheIfNecessary(); + Map> getActiveSearchParams(); Map getActiveSearchParams(String theResourceName); diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/retry/Retrier.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/retry/Retrier.java index 14a4bcd480a..ad6cbc6b202 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/retry/Retrier.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/retry/Retrier.java @@ -24,14 +24,19 @@ import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.time.DateUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import 
org.springframework.beans.factory.BeanCreationException; import org.springframework.retry.RetryCallback; import org.springframework.retry.RetryContext; import org.springframework.retry.RetryListener; +import org.springframework.retry.RetryPolicy; import org.springframework.retry.backoff.ExponentialBackOffPolicy; import org.springframework.retry.listener.RetryListenerSupport; +import org.springframework.retry.policy.ExceptionClassifierRetryPolicy; import org.springframework.retry.policy.SimpleRetryPolicy; import org.springframework.retry.support.RetryTemplate; +import java.util.HashMap; +import java.util.Map; import java.util.function.Supplier; public class Retrier { @@ -53,7 +58,17 @@ public class Retrier { backOff.setMultiplier(2); myRetryTemplate.setBackOffPolicy(backOff); - SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(); + SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(){ + private static final long serialVersionUID = -4522467251787518700L; + + @Override + public boolean canRetry(RetryContext context) { + if (context.getLastThrowable() instanceof BeanCreationException) { + return false; + } + return super.canRetry(context); + } + }; retryPolicy.setMaxAttempts(theMaxRetries); myRetryTemplate.setRetryPolicy(retryPolicy); diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3Test.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3Test.java index 9cf121417cc..7309fe39e80 100644 --- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3Test.java +++ b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3Test.java @@ -11,8 +11,8 @@ import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam; import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorDstu3; import 
ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; import ca.uhn.fhir.util.TestUtil; -import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport; import org.hl7.fhir.dstu3.hapi.ctx.DefaultProfileValidationSupport; +import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport; import org.hl7.fhir.dstu3.model.Observation; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -43,13 +43,18 @@ public class SearchParamExtractorDstu3Test { throw new UnsupportedOperationException(); } + @Override + public void refreshCacheIfNecessary() { + // nothing + } + @Override public Map> getActiveSearchParams() { throw new UnsupportedOperationException(); } @Override - public Map getActiveSearchParams(String theResourceName) { + public Map getActiveSearchParams(String theResourceName) { RuntimeResourceDefinition nextResDef = ourCtx.getResourceDefinition(theResourceName); Map sps = new HashMap<>(); for (RuntimeSearchParam nextSp : nextResDef.getSearchParams()) { @@ -98,10 +103,10 @@ public class SearchParamExtractorDstu3Test { public static void afterClassClearContext() { TestUtil.clearAllStaticFieldsForUnitTest(); } - + @BeforeClass public static void beforeClass() { ourValidationSupport = new DefaultProfileValidationSupport(); } - + } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionConstants.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionConstants.java index 6bfb148777b..cb7f1321321 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionConstants.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionConstants.java @@ -35,7 +35,7 @@ public class SubscriptionConstants { * The maximum number of subscriptions that can be active at once */ - public static final int MAX_SUBSCRIPTION_RESULTS = 1000; + public static final int 
MAX_SUBSCRIPTION_RESULTS = 50000; /** * The size of the queue used for sending resources to the subscription matching processor and by each subscription delivery queue diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionDeliveryHandlerFactory.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionDeliveryHandlerFactory.java index 400e5af37f4..eb350809984 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionDeliveryHandlerFactory.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionDeliveryHandlerFactory.java @@ -29,6 +29,7 @@ import org.hl7.fhir.r4.model.Subscription; import org.springframework.beans.factory.annotation.Lookup; import org.springframework.messaging.MessageHandler; import org.springframework.stereotype.Component; +import org.thymeleaf.util.Validate; import java.util.Optional; diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionLoader.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionLoader.java index 89e42fa25b1..339d8a0117b 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionLoader.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/module/cache/SubscriptionLoader.java @@ -21,6 +21,9 @@ package ca.uhn.fhir.jpa.subscription.module.cache; */ import ca.uhn.fhir.jpa.api.IDaoRegistry; +import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.retry.Retrier; import ca.uhn.fhir.rest.api.server.IBundleProvider; @@ 
-30,13 +33,16 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.time.DateUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.Subscription; +import org.quartz.DisallowConcurrentExecution; +import org.quartz.JobExecutionContext; +import org.quartz.PersistJobDataAfterExecution; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; -import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Service; +import javax.annotation.PostConstruct; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -46,24 +52,23 @@ import java.util.concurrent.Semaphore; @Service @Lazy public class SubscriptionLoader { + public static final long REFRESH_INTERVAL = DateUtils.MILLIS_PER_MINUTE; private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionLoader.class); private static final int MAX_RETRIES = 60; // 60 * 5 seconds = 5 minutes - + private final Object mySyncSubscriptionsLock = new Object(); @Autowired private ISubscriptionProvider mySubscriptionProvider; @Autowired private SubscriptionRegistry mySubscriptionRegistry; @Autowired(required = false) private IDaoRegistry myDaoRegistry; - - private final Object mySyncSubscriptionsLock = new Object(); private Semaphore mySyncSubscriptionsSemaphore = new Semaphore(1); + @Autowired + private ISchedulerService mySchedulerService; /** * Read the existing subscriptions from the database */ - @SuppressWarnings("unused") - @Scheduled(fixedDelay = DateUtils.MILLIS_PER_MINUTE) public void syncSubscriptions() { if (myDaoRegistry != null && !myDaoRegistry.isResourceTypeSupported("Subscription")) { return; @@ -78,6 +83,14 @@ public class SubscriptionLoader { } } + @PostConstruct + public void registerScheduledJob() { + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + 
jobDetail.setId(SubscriptionLoader.class.getName()); + jobDetail.setJobClass(SubscriptionLoader.SubmitJob.class); + mySchedulerService.scheduleFixedDelay(REFRESH_INTERVAL, false, jobDetail); + } + @VisibleForTesting public int doSyncSubscriptionsForUnitTest() { // Two passes for delete flag to take effect @@ -92,6 +105,10 @@ public class SubscriptionLoader { } private int doSyncSubscriptions() { + if (mySchedulerService.isStopping()) { + return 0; + } + synchronized (mySyncSubscriptionsLock) { ourLog.debug("Starting sync subscriptions"); SearchParameterMap map = new SearchParameterMap(); @@ -106,11 +123,13 @@ public class SubscriptionLoader { IBundleProvider subscriptionBundleList = mySubscriptionProvider.search(map); - if (subscriptionBundleList.size() >= SubscriptionConstants.MAX_SUBSCRIPTION_RESULTS) { + Integer subscriptionCount = subscriptionBundleList.size(); + assert subscriptionCount != null; + if (subscriptionCount >= SubscriptionConstants.MAX_SUBSCRIPTION_RESULTS) { ourLog.error("Currently over " + SubscriptionConstants.MAX_SUBSCRIPTION_RESULTS + " subscriptions. 
Some subscriptions have not been loaded."); } - List resourceList = subscriptionBundleList.getResources(0, subscriptionBundleList.size()); + List resourceList = subscriptionBundleList.getResources(0, subscriptionCount); Set allIds = new HashSet<>(); int changesCount = 0; @@ -134,5 +153,21 @@ public class SubscriptionLoader { public void setSubscriptionProviderForUnitTest(ISubscriptionProvider theSubscriptionProvider) { mySubscriptionProvider = theSubscriptionProvider; } + + @DisallowConcurrentExecution + @PersistJobDataAfterExecution + public static class SubmitJob extends FireAtIntervalJob { + @Autowired + private SubscriptionLoader myTarget; + + public SubmitJob() { + super(REFRESH_INTERVAL); + } + + @Override + protected void doExecute(JobExecutionContext theContext) { + myTarget.syncSubscriptions(); + } + } } diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/config/TestSubscriptionDstu3Config.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/config/TestSubscriptionDstu3Config.java index 5ff8ce59398..a4238230f12 100644 --- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/config/TestSubscriptionDstu3Config.java +++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/config/TestSubscriptionDstu3Config.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.subscription.module.config; import ca.uhn.fhir.jpa.api.IDaoRegistry; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamProvider; import ca.uhn.fhir.jpa.subscription.module.cache.ISubscriptionProvider; import org.springframework.context.annotation.*; @@ -31,4 +32,9 @@ public class TestSubscriptionDstu3Config extends SubscriptionDstu3Config { return retVal; } + @Bean + public ISchedulerService schedulerService() { + return mock(ISchedulerService.class); + } + } diff --git 
a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java index 8b543b5baa9..e2a164287b9 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java @@ -2,6 +2,7 @@ package ca.uhn.fhirtest; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; +import ca.uhn.fhir.jpa.bulk.BulkDataExportProvider; import ca.uhn.fhir.jpa.config.WebsocketDispatcherConfig; import ca.uhn.fhir.jpa.dao.DaoConfig; import ca.uhn.fhir.jpa.dao.DaoRegistry; @@ -258,6 +259,11 @@ public class TestRestfulServer extends RestfulServer { CascadingDeleteInterceptor cascadingDeleteInterceptor = new CascadingDeleteInterceptor(daoRegistry, interceptorBroadcaster); registerInterceptor(cascadingDeleteInterceptor); + /* + * Bulk Export + */ + registerProvider(myAppCtx.getBean(BulkDataExportProvider.class)); + } /** diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/AnalyticsInterceptor.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/AnalyticsInterceptor.java index 64baf5158d4..b21b6b27050 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/AnalyticsInterceptor.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/AnalyticsInterceptor.java @@ -1,8 +1,25 @@ package ca.uhn.fhirtest.interceptor; -import static org.apache.commons.lang3.StringUtils.defaultIfBlank; -import static org.apache.commons.lang3.StringUtils.isBlank; +import ca.uhn.fhir.jpa.model.sched.ISchedulerService; +import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; +import ca.uhn.fhir.rest.api.RestOperationTypeEnum; +import ca.uhn.fhir.rest.client.apache.ApacheRestfulClientFactory; +import 
ca.uhn.fhir.rest.server.interceptor.InterceptorAdapter; +import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; +import ca.uhn.fhir.util.UrlUtil; +import org.apache.http.client.HttpClient; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.quartz.Job; +import org.quartz.JobExecutionContext; +import org.springframework.beans.factory.annotation.Autowired; +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; @@ -10,34 +27,21 @@ import java.util.LinkedList; import java.util.List; import java.util.UUID; -import javax.annotation.PreDestroy; - -import org.apache.commons.io.IOUtils; -import org.apache.http.client.HttpClient; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.springframework.scheduling.annotation.Scheduled; - -import ca.uhn.fhir.rest.api.RestOperationTypeEnum; -import ca.uhn.fhir.rest.client.apache.ApacheRestfulClientFactory; -import ca.uhn.fhir.rest.server.interceptor.InterceptorAdapter; -import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; -import ca.uhn.fhir.util.UrlUtil; +import static org.apache.commons.lang3.StringUtils.defaultIfBlank; +import static org.apache.commons.lang3.StringUtils.isBlank; public class AnalyticsInterceptor extends InterceptorAdapter { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(AnalyticsInterceptor.class); private String myAnalyticsTid; private int myCollectThreshold = 100000; - private final LinkedList 
myEventBuffer = new LinkedList(); + private final LinkedList myEventBuffer = new LinkedList<>(); private String myHostname; private HttpClient myHttpClient; - private long myLastFlushed; private long mySubmitPeriod = 60000; private int mySubmitThreshold = 1000; + @Autowired + private ISchedulerService mySchedulerService; /** * Constructor @@ -51,21 +55,39 @@ public class AnalyticsInterceptor extends InterceptorAdapter { } } - @PreDestroy - public void destroy() { - if (myHttpClient instanceof CloseableHttpClient) { - IOUtils.closeQuietly((CloseableHttpClient) myHttpClient); + public static class SubmitJob implements Job { + @Autowired + private AnalyticsInterceptor myAnalyticsInterceptor; + + @Override + public void execute(JobExecutionContext theContext) { + myAnalyticsInterceptor.flush(); } } - protected void doFlush() { + @PostConstruct + public void start() { + ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); + jobDetail.setId(getClass().getName()); + jobDetail.setJobClass(SubmitJob.class); + mySchedulerService.scheduleFixedDelay(5000, false, jobDetail); + } + + @PreDestroy + public void stop() throws IOException { + if (myHttpClient instanceof CloseableHttpClient) { + ((CloseableHttpClient) myHttpClient).close(); + } + } + + private void doFlush() { List eventsToFlush; synchronized (myEventBuffer) { int size = myEventBuffer.size(); if (size > 20) { size = 20; } - eventsToFlush = new ArrayList(size); + eventsToFlush = new ArrayList<>(size); for (int i = 0; i < size; i++) { AnalyticsEvent nextEvent = myEventBuffer.pollFirst(); if (nextEvent != null) { @@ -93,23 +115,16 @@ public class AnalyticsInterceptor extends InterceptorAdapter { String contents = b.toString(); HttpPost post = new HttpPost("https://www.google-analytics.com/batch"); post.setEntity(new StringEntity(contents, ContentType.APPLICATION_FORM_URLENCODED)); - CloseableHttpResponse response = null; - try { - response = (CloseableHttpResponse) myHttpClient.execute(post); + try 
(CloseableHttpResponse response = (CloseableHttpResponse) myHttpClient.execute(post)) { ourLog.trace("Analytics response: {}", response); ourLog.info("Flushed {} analytics events and got HTTP {} {}", eventsToFlush.size(), response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()); } catch (Exception e) { ourLog.error("Failed to submit analytics:", e); - } finally { - if (response != null) { - IOUtils.closeQuietly(response); - } } } - @Scheduled(fixedDelay = 5000) - public synchronized void flush() { + private synchronized void flush() { int pendingEvents; synchronized (myEventBuffer) { pendingEvents = myEventBuffer.size(); @@ -119,14 +134,13 @@ public class AnalyticsInterceptor extends InterceptorAdapter { return; } - if (System.currentTimeMillis() - myLastFlushed > mySubmitPeriod) { + if (System.currentTimeMillis() > mySubmitPeriod) { doFlush(); return; } if (pendingEvents >= mySubmitThreshold) { doFlush(); - return; } } @@ -178,51 +192,51 @@ public class AnalyticsInterceptor extends InterceptorAdapter { private String myUserAgent; - public String getApplicationName() { + String getApplicationName() { return myApplicationName; } - public String getClientId() { + String getClientId() { return myClientId; } - public String getResourceName() { + String getResourceName() { return myResourceName; } - public RestOperationTypeEnum getRestOperation() { + RestOperationTypeEnum getRestOperation() { return myRestOperation; } - public String getSourceIp() { + String getSourceIp() { return mySourceIp; } - public String getUserAgent() { + String getUserAgent() { return myUserAgent; } - public void setApplicationName(String theApplicationName) { + void setApplicationName(String theApplicationName) { myApplicationName = theApplicationName; } - public void setClientId(String theClientId) { + void setClientId(String theClientId) { myClientId = theClientId; } - public void setResourceName(String theResourceName) { + void setResourceName(String 
theResourceName) { myResourceName = theResourceName; } - public void setRestOperation(RestOperationTypeEnum theRestOperation) { + void setRestOperation(RestOperationTypeEnum theRestOperation) { myRestOperation = theRestOperation; } - public void setSourceIp(String theSourceIp) { + void setSourceIp(String theSourceIp) { mySourceIp = theSourceIp; } - public void setUserAgent(String theUserAgent) { + void setUserAgent(String theUserAgent) { myUserAgent = theUserAgent; } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IRestfulServer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IRestfulServer.java index 8c076e5842c..29745b1ace6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IRestfulServer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IRestfulServer.java @@ -1,7 +1,7 @@ package ca.uhn.fhir.rest.api.server; import ca.uhn.fhir.context.api.BundleInclusionRule; -import ca.uhn.fhir.rest.api.PreferReturnEnum; +import ca.uhn.fhir.rest.api.PreferHeader; import ca.uhn.fhir.rest.server.IPagingProvider; import ca.uhn.fhir.rest.server.IRestfulServerDefaults; @@ -31,6 +31,6 @@ public interface IRestfulServer extends IRestfulServer BundleInclusionRule getBundleInclusionRule(); - PreferReturnEnum getDefaultPreferReturn(); + PreferHeader.PreferReturnEnum getDefaultPreferReturn(); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java index 5921e6ba67f..ebaa0684368 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java @@ -98,9 +98,9 @@ public class RestfulServer extends HttpServlet implements IRestfulServerPrefer header. *

* This setting changes the default behaviour if no Prefer header is supplied by the client. - * The default is {@link PreferReturnEnum#REPRESENTATION} + * The default is {@link PreferHeader.PreferReturnEnum#REPRESENTATION} *

* * @see HL7 FHIR Specification section on the Prefer header */ @Override - public PreferReturnEnum getDefaultPreferReturn() { + public PreferHeader.PreferReturnEnum getDefaultPreferReturn() { return myDefaultPreferReturn; } @@ -1786,12 +1786,12 @@ public class RestfulServer extends HttpServlet implements IRestfulServerPrefer header. *

* This setting changes the default behaviour if no Prefer header is supplied by the client. - * The default is {@link PreferReturnEnum#REPRESENTATION} + * The default is {@link PreferHeader.PreferReturnEnum#REPRESENTATION} *

* * @see HL7 FHIR Specification section on the Prefer header */ - public void setDefaultPreferReturn(PreferReturnEnum theDefaultPreferReturn) { + public void setDefaultPreferReturn(PreferHeader.PreferReturnEnum theDefaultPreferReturn) { Validate.notNull(theDefaultPreferReturn, "theDefaultPreferReturn must not be null"); myDefaultPreferReturn = theDefaultPreferReturn; } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java index cb966f6e6a3..e27ad31748e 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java @@ -41,6 +41,7 @@ import ca.uhn.fhir.util.DateUtils; import ca.uhn.fhir.util.UrlUtil; import org.hl7.fhir.instance.model.api.*; +import javax.annotation.Nonnull; import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.io.Writer; @@ -674,43 +675,54 @@ public class RestfulServerUtils { return ourOperationsWhichAllowPreferHeader.contains(theRestOperationType); } - /** - * @param theServer If null, no default will be used. If not null, the default will be read from the server. 
- */ - public static PreferReturnEnum parsePreferHeader(IRestfulServer theServer, String theValue) { - PreferReturnEnum retVal = null; + @Nonnull + public static PreferHeader parsePreferHeader(IRestfulServer theServer, String theValue) { + PreferHeader retVal = new PreferHeader(); + + if (isNotBlank(theValue)) { + StringTokenizer tok = new StringTokenizer(theValue, ";"); + while (tok.hasMoreTokens()) { + String next = trim(tok.nextToken()); + int eqIndex = next.indexOf('='); + + String key; + String value; + if (eqIndex == -1 || eqIndex >= next.length() - 2) { + key = next; + value = ""; + } else { + key = next.substring(0, eqIndex).trim(); + value = next.substring(eqIndex + 1).trim(); + } + + if (key.equals(Constants.HEADER_PREFER_RETURN)) { + + if (value.length() < 2) { + continue; + } + if ('"' == value.charAt(0) && '"' == value.charAt(value.length() - 1)) { + value = value.substring(1, value.length() - 1); + } + + retVal.setReturn(PreferHeader.PreferReturnEnum.fromHeaderValue(value)); + + } else if (key.equals(Constants.HEADER_PREFER_RESPOND_ASYNC)) { + + retVal.setRespondAsync(true); + + } + } + } - if (isNotBlank(theValue)) { - StringTokenizer tok = new StringTokenizer(theValue, ","); - while (tok.hasMoreTokens()) { - String next = tok.nextToken(); - int eqIndex = next.indexOf('='); - if (eqIndex == -1 || eqIndex >= next.length() - 2) { - continue; - } - - String key = next.substring(0, eqIndex).trim(); - if (key.equals(Constants.HEADER_PREFER_RETURN) == false) { - continue; - } - - String value = next.substring(eqIndex + 1).trim(); - if (value.length() < 2) { - continue; - } - if ('"' == value.charAt(0) && '"' == value.charAt(value.length() - 1)) { - value = value.substring(1, value.length() - 1); - } - - retVal = PreferReturnEnum.fromHeaderValue(value); - } + if (retVal.getReturn() == null && theServer != null && theServer.getDefaultPreferReturn() != null) { + retVal.setReturn(theServer.getDefaultPreferReturn()); } - if (retVal == null && theServer != null) 
{ - retVal = theServer.getDefaultPreferReturn(); - } return retVal; - } + } + + + public static boolean prettyPrintResponse(IRestfulServerDefaults theServer, RequestDetails theRequest) { Map requestParams = theRequest.getParameters(); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBinding.java index 67894bd5c66..f66e1699936 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBinding.java @@ -41,6 +41,8 @@ import java.lang.reflect.Method; import java.util.Collections; import java.util.Set; +import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; + abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding { static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseOutcomeReturningMethodBinding.class); @@ -181,9 +183,11 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding newParameterType = theContext.getElementDefinition(operationParam.typeName()).getImplementingClass(); + BaseRuntimeElementDefinition elementDefinition = theContext.getElementDefinition(operationParam.typeName()); + org.apache.commons.lang3.Validate.notNull(elementDefinition, "Unknown type name in @OperationParam: typeName=\"%s\"", operationParam.typeName()); + Class newParameterType = elementDefinition.getImplementingClass(); if (!declaredParameterType.isAssignableFrom(newParameterType)) { throw new ConfigurationException("Non assignable parameter typeName=\"" + operationParam.typeName() + "\" specified on method " + theMethod); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationParameter.java index 
4afac45b0cb..779fe92471b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationParameter.java @@ -311,46 +311,48 @@ public class OperationParameter implements IParameter { private void translateQueryParametersIntoServerArgumentForPost(RequestDetails theRequest, List matchingParamValues) { IBaseResource requestContents = (IBaseResource) theRequest.getUserData().get(REQUEST_CONTENTS_USERDATA_KEY); - RuntimeResourceDefinition def = myContext.getResourceDefinition(requestContents); - if (def.getName().equals("Parameters")) { + if (requestContents != null) { + RuntimeResourceDefinition def = myContext.getResourceDefinition(requestContents); + if (def.getName().equals("Parameters")) { - BaseRuntimeChildDefinition paramChild = def.getChildByName("parameter"); - BaseRuntimeElementCompositeDefinition paramChildElem = (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); + BaseRuntimeChildDefinition paramChild = def.getChildByName("parameter"); + BaseRuntimeElementCompositeDefinition paramChildElem = (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); - RuntimeChildPrimitiveDatatypeDefinition nameChild = (RuntimeChildPrimitiveDatatypeDefinition) paramChildElem.getChildByName("name"); - BaseRuntimeChildDefinition valueChild = paramChildElem.getChildByName("value[x]"); - BaseRuntimeChildDefinition resourceChild = paramChildElem.getChildByName("resource"); + RuntimeChildPrimitiveDatatypeDefinition nameChild = (RuntimeChildPrimitiveDatatypeDefinition) paramChildElem.getChildByName("name"); + BaseRuntimeChildDefinition valueChild = paramChildElem.getChildByName("value[x]"); + BaseRuntimeChildDefinition resourceChild = paramChildElem.getChildByName("resource"); - IAccessor paramChildAccessor = paramChild.getAccessor(); - List values = paramChildAccessor.getValues(requestContents); - for (IBase nextParameter 
: values) { - List nextNames = nameChild.getAccessor().getValues(nextParameter); - if (nextNames != null && nextNames.size() > 0) { - IPrimitiveType nextName = (IPrimitiveType) nextNames.get(0); - if (myName.equals(nextName.getValueAsString())) { + IAccessor paramChildAccessor = paramChild.getAccessor(); + List values = paramChildAccessor.getValues(requestContents); + for (IBase nextParameter : values) { + List nextNames = nameChild.getAccessor().getValues(nextParameter); + if (nextNames != null && nextNames.size() > 0) { + IPrimitiveType nextName = (IPrimitiveType) nextNames.get(0); + if (myName.equals(nextName.getValueAsString())) { - if (myParameterType.isAssignableFrom(nextParameter.getClass())) { - matchingParamValues.add(nextParameter); - } else { - List paramValues = valueChild.getAccessor().getValues(nextParameter); - List paramResources = resourceChild.getAccessor().getValues(nextParameter); - if (paramValues != null && paramValues.size() > 0) { - tryToAddValues(paramValues, matchingParamValues); - } else if (paramResources != null && paramResources.size() > 0) { - tryToAddValues(paramResources, matchingParamValues); + if (myParameterType.isAssignableFrom(nextParameter.getClass())) { + matchingParamValues.add(nextParameter); + } else { + List paramValues = valueChild.getAccessor().getValues(nextParameter); + List paramResources = resourceChild.getAccessor().getValues(nextParameter); + if (paramValues != null && paramValues.size() > 0) { + tryToAddValues(paramValues, matchingParamValues); + } else if (paramResources != null && paramResources.size() > 0) { + tryToAddValues(paramResources, matchingParamValues); + } } - } + } } } + + } else { + + if (myParameterType.isAssignableFrom(requestContents.getClass())) { + tryToAddValues(Arrays.asList(requestContents), matchingParamValues); + } + } - - } else { - - if (myParameterType.isAssignableFrom(requestContents.getClass())) { - tryToAddValues(Arrays.asList((IBase) requestContents), matchingParamValues); - } - } 
} diff --git a/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/RestfulServerUtilsTest.java b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/RestfulServerUtilsTest.java new file mode 100644 index 00000000000..bf84fdaf71c --- /dev/null +++ b/hapi-fhir-server/src/test/java/ca/uhn/fhir/rest/server/RestfulServerUtilsTest.java @@ -0,0 +1,30 @@ +package ca.uhn.fhir.rest.server; + +import ca.uhn.fhir.rest.api.PreferHeader; +import org.junit.Test; + +import static org.junit.Assert.*; + +public class RestfulServerUtilsTest{ + + @Test + public void testParsePreferReturn() { + PreferHeader header = RestfulServerUtils.parsePreferHeader(null,"return=representation"); + assertEquals(PreferHeader.PreferReturnEnum.REPRESENTATION, header.getReturn()); + assertFalse(header.getRespondAsync()); + } + + @Test + public void testParsePreferReturnAndAsync() { + PreferHeader header = RestfulServerUtils.parsePreferHeader(null,"return=OperationOutcome; respond-async"); + assertEquals(PreferHeader.PreferReturnEnum.OPERATION_OUTCOME, header.getReturn()); + assertTrue(header.getRespondAsync()); + } + + @Test + public void testParsePreferAsync() { + PreferHeader header = RestfulServerUtils.parsePreferHeader(null,"respond-async"); + assertEquals(null, header.getReturn()); + assertTrue(header.getRespondAsync()); + } +} diff --git a/hapi-fhir-structures-dstu2.1/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu2_1Test.java b/hapi-fhir-structures-dstu2.1/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu2_1Test.java index e5a41d7f5ef..bdfb08040d9 100644 --- a/hapi-fhir-structures-dstu2.1/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu2_1Test.java +++ b/hapi-fhir-structures-dstu2.1/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu2_1Test.java @@ -15,7 +15,6 @@ import com.helger.commons.io.stream.StringInputStream; import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.ReaderInputStream; import org.apache.http.*; -import 
org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.*; import org.apache.http.message.BasicHeader; @@ -558,7 +557,7 @@ public class GenericClientDstu2_1Test { Patient pt = new Patient(); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.create().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.create().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(2, myAnswerCount); assertNotNull(outcome.getOperationOutcome()); @@ -604,7 +603,7 @@ public class GenericClientDstu2_1Test { Patient pt = new Patient(); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.create().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.create().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, myAnswerCount); assertNull(outcome.getOperationOutcome()); @@ -1030,7 +1029,7 @@ public class GenericClientDstu2_1Test { client .update() .resource(bundle) - .prefer(PreferReturnEnum.REPRESENTATION) + .prefer(PreferHeader.PreferReturnEnum.REPRESENTATION) .encodedJson() .execute(); @@ -1075,7 +1074,7 @@ public class GenericClientDstu2_1Test { client .update() .resource(bundle) - .prefer(PreferReturnEnum.REPRESENTATION) + .prefer(PreferHeader.PreferReturnEnum.REPRESENTATION) .encodedXml() .execute(); @@ -1714,7 +1713,7 @@ public class GenericClientDstu2_1Test { pt.setId("Patient/222"); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.update().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.update().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(2, myAnswerCount); assertNotNull(outcome.getOperationOutcome()); @@ -1759,7 +1758,7 @@ public class GenericClientDstu2_1Test { 
pt.setId("Patient/222"); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.update().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.update().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, myAnswerCount); assertNull(outcome.getOperationOutcome()); diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu2Test.java b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu2Test.java index f499992b6c6..571737fc0dc 100644 --- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu2Test.java +++ b/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu2Test.java @@ -416,12 +416,12 @@ public class GenericClientDstu2Test { Patient p = new Patient(); p.addName().addFamily("FOOFAMILY"); - client.create().resource(p).prefer(PreferReturnEnum.MINIMAL).execute(); + client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute(); assertEquals(1, capt.getAllValues().get(idx).getHeaders(Constants.HEADER_PREFER).length); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, capt.getAllValues().get(idx).getHeaders(Constants.HEADER_PREFER)[0].getValue()); idx++; - client.create().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute(); + client.create().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, capt.getAllValues().get(idx).getHeaders(Constants.HEADER_PREFER).length); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, capt.getAllValues().get(idx).getHeaders(Constants.HEADER_PREFER)[0].getValue()); idx++; @@ -2465,12 +2465,12 @@ public class GenericClientDstu2Test { p.setId(new IdDt("1")); p.addName().addFamily("FOOFAMILY"); - 
client.update().resource(p).prefer(PreferReturnEnum.MINIMAL).execute(); + client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.MINIMAL).execute(); assertEquals(1, capt.getAllValues().get(idx).getHeaders(Constants.HEADER_PREFER).length); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_MINIMAL, capt.getAllValues().get(idx).getHeaders(Constants.HEADER_PREFER)[0].getValue()); idx++; - client.update().resource(p).prefer(PreferReturnEnum.REPRESENTATION).execute(); + client.update().resource(p).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, capt.getAllValues().get(idx).getHeaders(Constants.HEADER_PREFER).length); assertEquals(Constants.HEADER_PREFER_RETURN + '=' + Constants.HEADER_PREFER_RETURN_REPRESENTATION, capt.getAllValues().get(idx).getHeaders(Constants.HEADER_PREFER)[0].getValue()); idx++; diff --git a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu3Test.java b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu3Test.java index 625b0ba0a85..ab02674bee5 100644 --- a/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu3Test.java +++ b/hapi-fhir-structures-dstu3/src/test/java/ca/uhn/fhir/rest/client/GenericClientDstu3Test.java @@ -544,7 +544,7 @@ public class GenericClientDstu3Test { Patient pt = new Patient(); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.create().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.create().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(2, myAnswerCount); assertNotNull(outcome.getOperationOutcome()); @@ -590,7 +590,7 @@ public class GenericClientDstu3Test { Patient pt = new Patient(); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.create().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome 
outcome = client.create().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, myAnswerCount); assertNull(outcome.getOperationOutcome()); @@ -965,7 +965,7 @@ public class GenericClientDstu3Test { client .update() .resource(bundle) - .prefer(PreferReturnEnum.REPRESENTATION) + .prefer(PreferHeader.PreferReturnEnum.REPRESENTATION) .encodedJson() .execute(); @@ -1010,7 +1010,7 @@ public class GenericClientDstu3Test { client .update() .resource(bundle) - .prefer(PreferReturnEnum.REPRESENTATION) + .prefer(PreferHeader.PreferReturnEnum.REPRESENTATION) .encodedXml() .execute(); @@ -1679,7 +1679,7 @@ public class GenericClientDstu3Test { pt.setId("Patient/222"); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.update().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.update().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(2, myAnswerCount); assertNotNull(outcome.getOperationOutcome()); @@ -1724,7 +1724,7 @@ public class GenericClientDstu3Test { pt.setId("Patient/222"); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.update().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.update().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, myAnswerCount); assertNull(outcome.getOperationOutcome()); diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/GenericClientR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/GenericClientR4Test.java index ef5011187ed..5effc52e0e1 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/GenericClientR4Test.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/client/GenericClientR4Test.java @@ -342,7 +342,7 @@ public class GenericClientR4Test { Patient pt = new Patient(); 
pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.create().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.create().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(2, myAnswerCount); assertNotNull(outcome.getOperationOutcome()); @@ -388,7 +388,7 @@ public class GenericClientR4Test { Patient pt = new Patient(); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.create().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.create().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, myAnswerCount); assertNull(outcome.getOperationOutcome()); @@ -1296,7 +1296,7 @@ public class GenericClientR4Test { client .update() .resource(bundle) - .prefer(PreferReturnEnum.REPRESENTATION) + .prefer(PreferHeader.PreferReturnEnum.REPRESENTATION) .encodedJson() .execute(); @@ -1341,7 +1341,7 @@ public class GenericClientR4Test { client .update() .resource(bundle) - .prefer(PreferReturnEnum.REPRESENTATION) + .prefer(PreferHeader.PreferReturnEnum.REPRESENTATION) .encodedXml() .execute(); @@ -2153,7 +2153,7 @@ public class GenericClientR4Test { pt.setId("Patient/222"); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.update().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.update().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(2, myAnswerCount); assertNotNull(outcome.getOperationOutcome()); @@ -2198,7 +2198,7 @@ public class GenericClientR4Test { pt.setId("Patient/222"); pt.getText().setDivAsString("A PATIENT"); - MethodOutcome outcome = client.update().resource(pt).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome outcome = client.update().resource(pt).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); assertEquals(1, 
myAnswerCount); assertNull(outcome.getOperationOutcome()); diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/CreateR4Test.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/CreateR4Test.java index a2fcf4c1259..fdf61656450 100644 --- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/CreateR4Test.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/CreateR4Test.java @@ -4,7 +4,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.annotation.*; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.MethodOutcome; -import ca.uhn.fhir.rest.api.PreferReturnEnum; +import ca.uhn.fhir.rest.api.PreferHeader; import ca.uhn.fhir.rest.client.MyPatientWithExtensions; import ca.uhn.fhir.util.TestUtil; import org.apache.commons.io.IOUtils; @@ -268,7 +268,7 @@ public class CreateR4Test { HttpPost httpPost = new HttpPost("http://localhost:" + ourPort + "/Patient"); httpPost.setEntity(new StringEntity(body, ContentType.parse("application/fhir+json; charset=utf-8"))); - ourServlet.setDefaultPreferReturn(PreferReturnEnum.OPERATION_OUTCOME); + ourServlet.setDefaultPreferReturn(PreferHeader.PreferReturnEnum.OPERATION_OUTCOME); try (CloseableHttpResponse status = ourClient.execute(httpPost)) { assertEquals(201, status.getStatusLine().getStatusCode()); @@ -293,7 +293,7 @@ public class CreateR4Test { HttpPost httpPost = new HttpPost("http://localhost:" + ourPort + "/Patient"); httpPost.setEntity(new StringEntity(body, ContentType.parse("application/fhir+json; charset=utf-8"))); - ourServlet.setDefaultPreferReturn(PreferReturnEnum.MINIMAL); + ourServlet.setDefaultPreferReturn(PreferHeader.PreferReturnEnum.MINIMAL); try (CloseableHttpResponse status = ourClient.execute(httpPost)) { assertEquals(201, status.getStatusLine().getStatusCode()); diff --git a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/server/PreferTest.java 
b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/PreferTest.java similarity index 95% rename from hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/server/PreferTest.java rename to hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/PreferTest.java index 57bade29e4a..ed46a0f89d9 100644 --- a/hapi-fhir-structures-dstu2/src/test/java/ca/uhn/fhir/rest/server/PreferTest.java +++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/rest/server/PreferTest.java @@ -1,9 +1,6 @@ package ca.uhn.fhir.rest.server; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.model.api.IResource; -import ca.uhn.fhir.model.dstu2.resource.OperationOutcome; -import ca.uhn.fhir.model.dstu2.resource.Patient; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.annotation.Create; import ca.uhn.fhir.rest.annotation.IdParam; @@ -25,6 +22,10 @@ import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.r4.model.OperationOutcome; +import org.hl7.fhir.r4.model.Patient; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -40,12 +41,11 @@ import static org.junit.Assert.*; public class PreferTest { private static CloseableHttpClient ourClient; - private static FhirContext ourCtx = FhirContext.forDstu2(); + private static FhirContext ourCtx = FhirContext.forR4(); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(PreferTest.class); private static int ourPort; private static Server ourServer; - - public static IBaseOperationOutcome ourReturnOperationOutcome; + private static IBaseOperationOutcome ourReturnOperationOutcome; @Before public void before() { @@ -190,7 +190,7 @@ public class PreferTest { @Create() public MethodOutcome 
createPatient(@ResourceParam Patient thePatient) { - IdDt id = new IdDt("Patient/001/_history/002"); + IdType id = new IdType("Patient/001/_history/002"); MethodOutcome retVal = new MethodOutcome(id); thePatient.setId(id); @@ -202,12 +202,12 @@ public class PreferTest { } @Override - public Class getResourceType() { + public Class getResourceType() { return Patient.class; } @Update() - public MethodOutcome updatePatient(@ResourceParam Patient thePatient, @IdParam IdDt theIdParam) { + public MethodOutcome updatePatient(@ResourceParam Patient thePatient, @IdParam IdType theIdParam) { IdDt id = new IdDt("Patient/001/_history/002"); MethodOutcome retVal = new MethodOutcome(id); diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/client/FhirClientTest.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/client/FhirClientTest.java index 55e3a507524..e6852ba8067 100644 --- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/client/FhirClientTest.java +++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/client/FhirClientTest.java @@ -3,7 +3,7 @@ package ca.uhn.fhir.tests.integration.karaf.client; import java.io.IOException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.MethodOutcome; -import ca.uhn.fhir.rest.api.PreferReturnEnum; +import ca.uhn.fhir.rest.api.PreferHeader; import ca.uhn.fhir.rest.client.api.IGenericClient; import org.hl7.fhir.dstu3.model.Patient; import org.junit.Ignore; @@ -56,7 +56,7 @@ public class FhirClientTest { patient.setId("PATIENTID"); patient.getMeta().addProfile("http://BAR"); patient.addName().addGiven("GIVEN"); - MethodOutcome execute = client.create().resource(patient).prefer(PreferReturnEnum.REPRESENTATION).execute(); + MethodOutcome execute = client.create().resource(patient).prefer(PreferHeader.PreferReturnEnum.REPRESENTATION).execute(); 
ourLog.info(execute.toString()); } diff --git a/pom.xml b/pom.xml index 874e7591e9e..2fa04c38797 100755 --- a/pom.xml +++ b/pom.xml @@ -1278,6 +1278,11 @@ postgresql 42.2.6.jre7 + + org.quartz-scheduler + quartz + 2.3.1 + org.slf4j slf4j-android diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 37cb46292ba..b2bd8047af9 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -31,6 +31,14 @@ can use either the source URI, the request ID, or both. ]]> + + New Feature: + Support for the FHIR Bulk Data Export specification has been added to the JPA server. See the + specification for information on how this works. Note that + only system level export is currently supported but others will follow. + ]]> + The email Subscription deliverer now respects the payload property of the subscription when deciding how to encode the resource being sent. Thanks to Sean McIlvenna for the @@ -142,6 +150,9 @@ terminology service to accurately reflect the new CodeSystem URL and/or concepts. This is now corrected. + + The JPA server now uses the Quartz scheduling library as a job scheduling mechanism +