diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml
index 033bfdecb51..69e4479dce4 100644
--- a/hapi-deployable-pom/pom.xml
+++ b/hapi-deployable-pom/pom.xml
@@ -2,11 +2,11 @@
 <modelVersion>4.0.0</modelVersion>
 <parent>
- <groupId>ca.uhn.hapi.fhir</groupId>
- <artifactId>hapi-fhir</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
- <relativePath>../pom.xml</relativePath>
- </parent>
+ <groupId>ca.uhn.hapi.fhir</groupId>
+ <artifactId>hapi-fhir</artifactId>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>

 <artifactId>hapi-deployable-pom</artifactId>
 <packaging>pom</packaging>
diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml
index 5dd416c3a79..03fb806379f 100644
--- a/hapi-fhir-android/pom.xml
+++ b/hapi-fhir-android/pom.xml
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml
index 68580db17d9..9f950a9f42c 100644
--- a/hapi-fhir-base/pom.xml
+++ b/hapi-fhir-base/pom.xml
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -19,11 +19,14 @@
+ <dependency>
+ <groupId>com.fasterxml.jackson.datatype</groupId>
+ <artifactId>jackson-datatype-jsr310</artifactId>
+ </dependency>
 <dependency>
 <groupId>com.fasterxml.jackson.core</groupId>
 <artifactId>jackson-databind</artifactId>
 </dependency>
-
 <dependency>
 <groupId>com.fasterxml.woodstox</groupId>
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java
index 6a4a7d01695..54cdce0f05d 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java
@@ -225,6 +225,18 @@ public class FhirContext {
}
+ public static FhirContext forDstu3Cached() {
+ return forCached(FhirVersionEnum.DSTU3);
+ }
+
+ public static FhirContext forR4Cached() {
+ return forCached(FhirVersionEnum.R4);
+ }
+
+ public static FhirContext forR5Cached() {
+ return forCached(FhirVersionEnum.R5);
+ }
+
private String createUnknownResourceNameError(final String theResourceName, final FhirVersionEnum theVersion) {
return getLocalizer().getMessage(FhirContext.class, "unknownResourceName", theResourceName, theVersion);
}
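A minimal usage sketch of the new cached factory methods, assuming (as the delegation to forCached() above suggests) that one FhirContext instance is memoized per FHIR version; the class name below is hypothetical:

```java
import ca.uhn.fhir.context.FhirContext;

public class CachedContextDemo {
	public static void main(String[] args) {
		// Both calls should return the same memoized R4 instance, avoiding the
		// cost of building a new FhirContext (and rescanning model classes) each time.
		FhirContext first = FhirContext.forR4Cached();
		FhirContext second = FhirContext.forR4Cached();
		System.out.println(first == second); // expected: true
	}
}
```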
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
index ec21cd36275..cbf4321461c 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
@@ -1840,6 +1840,9 @@ public enum Pointcut implements IPointcut {
* pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
* only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
*
+ * <li>
+ * ca.uhn.fhir.context.RuntimeResourceDefinition - the resource type being accessed
+ * </li>
*
*
* Hooks must return void.
@@ -1851,7 +1854,8 @@ public enum Pointcut implements IPointcut {
// Params
"ca.uhn.fhir.interceptor.model.RequestPartitionId",
"ca.uhn.fhir.rest.api.server.RequestDetails",
- "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
+ "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
+ "ca.uhn.fhir.context.RuntimeResourceDefinition"
),
/**
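A sketch of an interceptor hook consuming the new parameter. The enum constant is not visible in this hunk, so Pointcut.STORAGE_PARTITION_SELECTED is an assumption here; the parameter list mirrors the one declared above, and hooks on this pointcut return void per the javadoc:

```java
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.server.RequestDetails;

@Interceptor
public class PartitionSelectedLogger {
	// The pointcut constant is assumed; the parameter types match the hunk above.
	@Hook(Pointcut.STORAGE_PARTITION_SELECTED)
	public void partitionSelected(RequestPartitionId thePartitionId, RequestDetails theRequestDetails, RuntimeResourceDefinition theResourceDef) {
		// theResourceDef can drive resource-type-specific partition logic
		System.out.println("Partition " + thePartitionId + " selected for "
			+ (theResourceDef != null ? theResourceDef.getName() : "n/a"));
	}
}
```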
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java
index f8518e3a3f0..d161be864f1 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java
@@ -20,6 +20,10 @@ package ca.uhn.fhir.interceptor.model;
* #L%
*/
+import ca.uhn.fhir.model.api.IModelJson;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
@@ -41,12 +45,17 @@ import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
/**
* @since 5.0.0
*/
-public class RequestPartitionId {
-
+public class RequestPartitionId implements IModelJson {
private static final RequestPartitionId ALL_PARTITIONS = new RequestPartitionId();
+ private static final ObjectMapper ourObjectMapper = new ObjectMapper().registerModule(new com.fasterxml.jackson.datatype.jsr310.JavaTimeModule());
+
+ @JsonProperty("partitionDate")
private final LocalDate myPartitionDate;
+ @JsonProperty("allPartitions")
private final boolean myAllPartitions;
+ @JsonProperty("partitionIds")
private final List<Integer> myPartitionIds;
+ @JsonProperty("partitionNames")
private final List<String> myPartitionNames;
/**
@@ -80,6 +89,10 @@ public class RequestPartitionId {
myAllPartitions = true;
}
+ public static RequestPartitionId fromJson(String theJson) throws JsonProcessingException {
+ return ourObjectMapper.readValue(theJson, RequestPartitionId.class);
+ }
+
public boolean isAllPartitions() {
return myAllPartitions;
}
@@ -308,4 +321,8 @@ public class RequestPartitionId {
}
return retVal;
}
+
+ public String asJson() throws JsonProcessingException {
+ return ourObjectMapper.writeValueAsString(this);
+ }
}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/interceptor/model/RequestPartitionIdTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/interceptor/model/RequestPartitionIdTest.java
index 896be941a7c..3ac1e6006eb 100644
--- a/hapi-fhir-base/src/test/java/ca/uhn/fhir/interceptor/model/RequestPartitionIdTest.java
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/interceptor/model/RequestPartitionIdTest.java
@@ -1,16 +1,22 @@
package ca.uhn.fhir.interceptor.model;
+import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.time.LocalDate;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class RequestPartitionIdTest {
+ private static final Logger ourLog = LoggerFactory.getLogger(RequestPartitionIdTest.class);
@Test
public void testHashCode() {
@@ -36,5 +42,30 @@ public class RequestPartitionIdTest {
assertFalse(RequestPartitionId.forPartitionIdsAndNames(null, Lists.newArrayList(1, 2), null).isDefaultPartition());
}
+ @Test
+ public void testSerDeserSer() throws JsonProcessingException {
+ {
+ RequestPartitionId start = RequestPartitionId.fromPartitionId(123, LocalDate.of(2020, 1, 1));
+ String json = assertSerDeserSer(start);
+ assertThat(json, containsString("\"partitionDate\":[2020,1,1]"));
+ assertThat(json, containsString("\"partitionIds\":[123]"));
+ }
+ {
+ RequestPartitionId start = RequestPartitionId.forPartitionIdsAndNames(Lists.newArrayList("Name1", "Name2"), null, null);
+ String json = assertSerDeserSer(start);
+ assertThat(json, containsString("partitionNames\":[\"Name1\",\"Name2\"]"));
+ }
+ assertSerDeserSer(RequestPartitionId.allPartitions());
+ assertSerDeserSer(RequestPartitionId.defaultPartition());
+ }
+ private String assertSerDeserSer(RequestPartitionId start) throws JsonProcessingException {
+ String json = start.asJson();
+ ourLog.info(json);
+ RequestPartitionId end = RequestPartitionId.fromJson(json);
+ assertEquals(start, end);
+ String json2 = end.asJson();
+ assertEquals(json, json2);
+ return json;
+ }
}
diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml
index 9415127567b..7726fc7198b 100644
--- a/hapi-fhir-bom/pom.xml
+++ b/hapi-fhir-bom/pom.xml
@@ -3,14 +3,14 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>

 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
index 21035298167..6c7599ba5d8 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
index 53ed73bd820..338a9a084dd 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-cli</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml
index 6cc4988abc3..3a4f1a69e0d 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/pom.xml
@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom</relativePath>
 </parent>
diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml
index d331ce01d1c..5a7e5fb4a75 100644
--- a/hapi-fhir-cli/pom.xml
+++ b/hapi-fhir-cli/pom.xml
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml
index 32340305a54..af07f89d206 100644
--- a/hapi-fhir-client-okhttp/pom.xml
+++ b/hapi-fhir-client-okhttp/pom.xml
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml
index 451c3105b47..9b80dcfc050 100644
--- a/hapi-fhir-client/pom.xml
+++ b/hapi-fhir-client/pom.xml
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml
index 1b5cd57c84f..c6fde35a95f 100644
--- a/hapi-fhir-converter/pom.xml
+++ b/hapi-fhir-converter/pom.xml
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml
index 95936f4073b..740c5fbd51e 100644
--- a/hapi-fhir-dist/pom.xml
+++ b/hapi-fhir-dist/pom.xml
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml
index 27d42c6f910..afc37850a1b 100644
--- a/hapi-fhir-docs/pom.xml
+++ b/hapi-fhir-docs/pom.xml
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_5_0/2697-delete-expunge-spring-batch.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_5_0/2697-delete-expunge-spring-batch.yaml
new file mode 100644
index 00000000000..c7ca691f25b
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_5_0/2697-delete-expunge-spring-batch.yaml
@@ -0,0 +1,8 @@
+---
+type: change
+issue: 2697
+title: "DELETE _expunge=true has been converted to use Spring Batch. It now simply returns the jobId of the Spring Batch
+job while the job continues to run in the background. A new operation called $expunge-delete has been added to provide
+more fine-grained control of the delete expunge operation. This operation accepts an ordered list of URLs to be delete
+expunged and an optional batch-size parameter that will be used to perform the delete expunge. If no batch size is
+specified in the operation, then the value of DaoConfig.getExpungeBatchSize() is used."
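A client-side sketch of invoking the operation described in this changelog entry. The operation name comes from the text above; the parameter names "url" and "batchSize" are assumptions, since the entry does not spell them out:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class DeleteExpungeOperationSketch {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4Cached();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		// Ordered list of URLs to delete-expunge; the parameter names are hypothetical.
		Parameters input = new Parameters();
		input.addParameter().setName("url").setValue(new StringType("Patient?active=false"));
		input.addParameter().setName("batchSize").setValue(new IntegerType(100));

		Parameters output = client.operation()
			.onServer()
			.named("$expunge-delete")
			.withParameters(input)
			.execute();
		System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
	}
}
```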
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/configuration.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/configuration.md
index ef3881dddfc..932befb6f1a 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/configuration.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/configuration.md
@@ -129,4 +129,7 @@ X-Retry-On-Version-Conflict: retry; max-retries=100
# Controlling Delete with Expunge size
-During the delete with expunge operation there is an internal synchronous search which locates all the resources to be deleted. The default maximum size of this search is 10000. This can be configured via the [Internal Synchronous Search Size](/hapi-fhir/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setInternalSynchronousSearchSize(java.lang.Integer)) property.
+Delete with expunge submits a job to delete and expunge the requested resources. This is done in batches. If the
+DELETE ?_expunge=true syntax is used to trigger the delete expunge, then the batch size is determined by the value
+of the [Expunge Batch Size](/apidocs/hapi-fhir-jpaserver-api/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
+property.
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml
index 246b0427f79..f12483d081d 100644
--- a/hapi-fhir-jacoco/pom.xml
+++ b/hapi-fhir-jacoco/pom.xml
@@ -11,7 +11,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml
index 58076cd5149..ca13ac36381 100644
--- a/hapi-fhir-jaxrsserver-base/pom.xml
+++ b/hapi-fhir-jaxrsserver-base/pom.xml
@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-jaxrsserver-example/pom.xml b/hapi-fhir-jaxrsserver-example/pom.xml
index 2a1bbb76b89..9259b74dfdb 100644
--- a/hapi-fhir-jaxrsserver-example/pom.xml
+++ b/hapi-fhir-jaxrsserver-example/pom.xml
@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-jpaserver-api/pom.xml b/hapi-fhir-jpaserver-api/pom.xml
index 392b5b3d8ff..5a8f35f6464 100644
--- a/hapi-fhir-jpaserver-api/pom.xml
+++ b/hapi-fhir-jpaserver-api/pom.xml
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java
index a60afe0dcab..858cf8a4a83 100644
--- a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java
+++ b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/config/DaoConfig.java
@@ -8,6 +8,7 @@ import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.util.HapiExtensions;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Sets;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu2.model.Subscription;
@@ -220,7 +221,7 @@ public class DaoConfig {
* update setter javadoc if default changes
*/
@Nonnull
- private Long myTranslationCachesExpireAfterWriteInMinutes = DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES;
+ private final Long myTranslationCachesExpireAfterWriteInMinutes = DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES;
/**
* @since 5.4.0
*/
@@ -461,10 +462,12 @@ public class DaoConfig {
*
* Default is false
*
- * @since 5.5.0
+ * @since 5.4.0
+ * @deprecated Deprecated in 5.5.0. Use {@link #setMatchUrlCacheEnabled(boolean)} instead (the name of this method is misleading)
*/
- public boolean isMatchUrlCacheEnabled() {
- return getMatchUrlCache();
+ @Deprecated
+ public void setMatchUrlCache(boolean theMatchUrlCache) {
+ myMatchUrlCacheEnabled = theMatchUrlCache;
}
/**
@@ -475,12 +478,10 @@ public class DaoConfig {
*
* Default is false
*
- * @since 5.4.0
- * @deprecated Deprecated in 5.5.0. Use {@link #setMatchUrlCacheEnabled(boolean)} instead (the name of this method is misleading)
+ * @since 5.5.0
*/
- @Deprecated
- public void setMatchUrlCache(boolean theMatchUrlCache) {
- myMatchUrlCacheEnabled = theMatchUrlCache;
+ public boolean isMatchUrlCacheEnabled() {
+ return getMatchUrlCache();
}
/**
@@ -1629,7 +1630,8 @@ public class DaoConfig {
/**
* The expunge batch size (default 800) determines the number of records deleted within a single transaction by the
- * expunge operation.
+ * expunge operation. When expunging via DELETE ?_expunge=true, then this value determines the batch size for
+ * the number of resources deleted and expunged at a time.
*/
public int getExpungeBatchSize() {
return myExpungeBatchSize;
@@ -1637,7 +1639,8 @@ public class DaoConfig {
/**
* The expunge batch size (default 800) determines the number of records deleted within a single transaction by the
- * expunge operation.
+ * expunge operation. When expunging via DELETE ?_expunge=true, then this value determines the batch size for
+ * the number of resources deleted and expunged at a time.
*/
public void setExpungeBatchSize(int theExpungeBatchSize) {
myExpungeBatchSize = theExpungeBatchSize;
@@ -2328,9 +2331,8 @@ public class DaoConfig {
/**
*
- * This determines the internal search size that is run synchronously during operations such as:
- * 1. Delete with _expunge parameter.
- * 2. Searching for Code System IDs by System and Code
+ * This determines the internal search size that is run synchronously during operations such as searching for
+ * Code System IDs by System and Code
*
*
* @since 5.4.0
@@ -2341,9 +2343,8 @@ public class DaoConfig {
/**
*
- * This determines the internal search size that is run synchronously during operations such as:
- * 1. Delete with _expunge parameter.
- * 2. Searching for Code System IDs by System and Code
+ * This determines the internal search size that is run synchronously during operations such as searching for
+ * Code System IDs by System and Code
*
*
* @since 5.4.0
@@ -2529,6 +2530,30 @@ public class DaoConfig {
myTriggerSubscriptionsForNonVersioningChanges = theTriggerSubscriptionsForNonVersioningChanges;
}
+ public boolean canDeleteExpunge() {
+ return isAllowMultipleDelete() && isExpungeEnabled() && isDeleteExpungeEnabled();
+ }
+
+ public String cannotDeleteExpungeReason() {
+ List<String> reasons = new ArrayList<>();
+ if (!isAllowMultipleDelete()) {
+ reasons.add("Multiple Delete");
+ }
+ if (!isExpungeEnabled()) {
+ reasons.add("Expunge");
+ }
+ if (!isDeleteExpungeEnabled()) {
+ reasons.add("Delete Expunge");
+ }
+ String retval = "Delete Expunge is not supported on this server. ";
+ if (reasons.size() == 1) {
+ retval += reasons.get(0) + " is disabled.";
+ } else {
+ retval += "The following configurations are disabled: " + StringUtils.join(reasons, ", ");
+ }
+ return retval;
+ }
+
public enum StoreMetaSourceInformationEnum {
NONE(false, false),
SOURCE_URI(true, false),
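A brief configuration sketch tying together the three settings that canDeleteExpunge() checks above. It assumes the boolean setters matching the is-getters used in that method exist on DaoConfig:

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class DeleteExpungeConfigSketch {
	public static void main(String[] args) {
		DaoConfig daoConfig = new DaoConfig();
		// All three must be enabled, or DELETE ?_expunge=true is rejected with
		// the message built by cannotDeleteExpungeReason() above.
		daoConfig.setAllowMultipleDelete(true);
		daoConfig.setExpungeEnabled(true);
		daoConfig.setDeleteExpungeEnabled(true);
		daoConfig.setExpungeBatchSize(500); // batch size used by the delete-expunge job
		System.out.println(daoConfig.canDeleteExpunge()); // expected: true
	}
}
```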
diff --git a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java
index cf7126d91fd..00b5aef2fc4 100644
--- a/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java
+++ b/hapi-fhir-jpaserver-api/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.api.model;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.MethodOutcome;
+import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import java.util.List;
@@ -32,31 +33,51 @@ import java.util.List;
public class DeleteMethodOutcome extends MethodOutcome {
private List<ResourceTable> myDeletedEntities;
+ @Deprecated
private long myExpungedResourcesCount;
+ @Deprecated
private long myExpungedEntitiesCount;
+ public DeleteMethodOutcome() {
+ }
+
+ public DeleteMethodOutcome(IBaseOperationOutcome theBaseOperationOutcome) {
+ super(theBaseOperationOutcome);
+ }
+
public List<ResourceTable> getDeletedEntities() {
return myDeletedEntities;
}
+ /**
+ * Use {@link ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter#ENTITY_TOTAL_UPDATED_OR_DELETED}
+ */
+ @Deprecated
public DeleteMethodOutcome setDeletedEntities(List<ResourceTable> theDeletedEntities) {
myDeletedEntities = theDeletedEntities;
return this;
}
+ /**
+ * Use {@link ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener#RESOURCE_TOTAL_PROCESSED}
+ */
+ @Deprecated
public long getExpungedResourcesCount() {
return myExpungedResourcesCount;
}
+ @Deprecated
public DeleteMethodOutcome setExpungedResourcesCount(long theExpungedResourcesCount) {
myExpungedResourcesCount = theExpungedResourcesCount;
return this;
}
+ @Deprecated
public long getExpungedEntitiesCount() {
return myExpungedEntitiesCount;
}
+ @Deprecated
public DeleteMethodOutcome setExpungedEntitiesCount(long theExpungedEntitiesCount) {
myExpungedEntitiesCount = theExpungedEntitiesCount;
return this;
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index 1cbd0536f8b..b63bf859954 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
- <version>5.5.0-PRE3-SNAPSHOT</version>
+ <version>5.5.0-PRE4-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java
index 17bc6cc4f8f..ba2318149a4 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/BatchJobsConfig.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.batch;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
+import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@@ -32,9 +33,10 @@ import java.util.Set;
@Configuration
//When you define a new batch job, add it here.
@Import({
- CommonBatchJobConfig.class,
- BulkExportJobConfig.class,
- BulkImportJobConfig.class
+ CommonBatchJobConfig.class,
+ BulkExportJobConfig.class,
+ BulkImportJobConfig.class,
+ DeleteExpungeJobConfig.class
})
public class BatchJobsConfig {
@@ -73,4 +75,8 @@ public class BatchJobsConfig {
RECORD_PROCESSING_STEP_NAMES = Collections.unmodifiableSet(recordProcessingStepNames);
}
+ /**
+ * Delete Expunge
+ */
+ public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob";
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/CommonBatchJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/CommonBatchJobConfig.java
index 340c7be3393..dc0ec59b1ff 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/CommonBatchJobConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/CommonBatchJobConfig.java
@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.batch;
* #L%
*/
-import ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor;
-import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
+import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
+import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/listener/PidReaderCounterListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/listener/PidReaderCounterListener.java
new file mode 100644
index 00000000000..6a3bf1f60a1
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/listener/PidReaderCounterListener.java
@@ -0,0 +1,48 @@
+package ca.uhn.fhir.jpa.batch.listener;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.annotation.AfterProcess;
+import org.springframework.batch.core.annotation.BeforeStep;
+
+import java.util.List;
+
+/**
+ * Add the number of pids processed to the execution context so we can track progress of the job
+ */
+public class PidReaderCounterListener {
+ public static final String RESOURCE_TOTAL_PROCESSED = "resource.total.processed";
+
+ private StepExecution myStepExecution;
+ private Long myTotalPidsProcessed = 0L;
+
+ @BeforeStep
+ public void setStepExecution(StepExecution stepExecution) {
+ myStepExecution = stepExecution;
+ }
+
+ @AfterProcess
+ public void afterProcess(List<Long> thePids, List<String> theSqlList) {
+ myTotalPidsProcessed += thePids.size();
+ myStepExecution.getExecutionContext().putLong(RESOURCE_TOTAL_PROCESSED, myTotalPidsProcessed);
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processor/GoldenResourceAnnotatingProcessor.java
similarity index 99%
rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java
rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processor/GoldenResourceAnnotatingProcessor.java
index 46c418a1e74..5bc6169f675 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/GoldenResourceAnnotatingProcessor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processor/GoldenResourceAnnotatingProcessor.java
@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.batch.processors;
+package ca.uhn.fhir.jpa.batch.processor;
/*-
* #%L
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/PidToIBaseResourceProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processor/PidToIBaseResourceProcessor.java
similarity index 98%
rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/PidToIBaseResourceProcessor.java
rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processor/PidToIBaseResourceProcessor.java
index 0554b7da31d..20e2825e020 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processors/PidToIBaseResourceProcessor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processor/PidToIBaseResourceProcessor.java
@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.batch.processors;
+package ca.uhn.fhir.jpa.batch.processor;
/*-
* #%L
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/reader/ReverseCronologicalBatchResourcePidReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/reader/ReverseCronologicalBatchResourcePidReader.java
new file mode 100644
index 00000000000..c9fc0fc10bc
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/reader/ReverseCronologicalBatchResourcePidReader.java
@@ -0,0 +1,200 @@
+package ca.uhn.fhir.jpa.batch.reader;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.delete.model.PartitionedUrl;
+import ca.uhn.fhir.jpa.delete.model.RequestListJson;
+import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
+import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
+import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.SortOrderEnum;
+import ca.uhn.fhir.rest.api.SortSpec;
+import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
+import ca.uhn.fhir.rest.param.DateRangeParam;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.jetbrains.annotations.NotNull;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.item.ExecutionContext;
+import org.springframework.batch.item.ItemReader;
+import org.springframework.batch.item.ItemStream;
+import org.springframework.batch.item.ItemStreamException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * This Spring Batch reader takes three job parameters:
+ * {@link #JOB_PARAM_REQUEST_LIST}: A list of URLs to search for, along with the partitions those searches should be performed on
+ * {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} is used.
+ * {@link #JOB_PARAM_START_TIME}: The latest timestamp of resources to search for
+ *
+ * The reader returns at most {@link #JOB_PARAM_BATCH_SIZE} pids each time it is called, or null once no more
+ * matching resources are available. Resources are returned in reverse chronological order. The reader saves its
+ * progress in the Spring Batch execution context under the key {@link #CURRENT_THRESHOLD_HIGH} suffixed with "."
+ * and the index of the url list item it has reached, so that a restarted job can resume where it left off.
+ */
+public class ReverseCronologicalBatchResourcePidReader implements ItemReader<List<Long>>, ItemStream {
+
+ public static final String JOB_PARAM_REQUEST_LIST = "url-list";
+ public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
+ public static final String JOB_PARAM_START_TIME = "start-time";
+
+ public static final String CURRENT_URL_INDEX = "current.url-index";
+ public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
+ private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);
+
+ @Autowired
+ private FhirContext myFhirContext;
+ @Autowired
+ private MatchUrlService myMatchUrlService;
+ @Autowired
+ private DaoRegistry myDaoRegistry;
+ @Autowired
+ private DaoConfig myDaoConfig;
+
+ private List<PartitionedUrl> myPartitionedUrls;
+ private Integer myBatchSize;
+ private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
+ private int myUrlIndex = 0;
+ private Date myStartTime;
+
+ @Autowired
+ public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
+ RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
+ myPartitionedUrls = requestListJson.getPartitionedUrls();
+ }
+
+ @Autowired
+ public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
+ myBatchSize = theBatchSize;
+ }
+
+ @Autowired
+ public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
+ myStartTime = theStartTime;
+ }
+
+ @Override
+ public List<Long> read() throws Exception {
+ while (myUrlIndex < myPartitionedUrls.size()) {
+ List<Long> nextBatch = getNextBatch();
+ if (nextBatch.isEmpty()) {
+ ++myUrlIndex;
+ continue;
+ }
+
+ return nextBatch;
+ }
+ return null;
+ }
+
+ private List<Long> getNextBatch() {
+ ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(myPartitionedUrls.get(myUrlIndex).getUrl());
+ SearchParameterMap map = buildSearchParameterMap(resourceSearch);
+
+ // Perform the search
+ IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceSearch.getResourceName());
+ List<Long> retval = dao.searchForIds(map, buildSystemRequestDetails()).stream()
+ .map(ResourcePersistentId::getIdAsLong)
+ .collect(Collectors.toList());
+
+ if (ourLog.isDebugEnabled()) {
+ ourLog.debug("Search for {}{} returned {} results", resourceSearch.getResourceName(), map.toNormalizedQueryString(myFhirContext), retval.size());
+ ourLog.debug("Results: {}", retval);
+ }
+
+ if (!retval.isEmpty()) {
+ // Adjust the high threshold to be the earliest resource in the batch we found
+ Long pidOfOldestResourceInBatch = retval.get(retval.size() - 1);
+ IBaseResource earliestResource = dao.readByPid(new ResourcePersistentId(pidOfOldestResourceInBatch));
+ myThresholdHighByUrlIndex.put(myUrlIndex, earliestResource.getMeta().getLastUpdated());
+ }
+
+ return retval;
+ }
+
+ @NotNull
+ private SearchParameterMap buildSearchParameterMap(ResourceSearch resourceSearch) {
+ SearchParameterMap map = resourceSearch.getSearchParameterMap();
+ map.setLastUpdated(new DateRangeParam().setUpperBoundInclusive(myThresholdHighByUrlIndex.get(myUrlIndex)));
+ map.setLoadSynchronousUpTo(myBatchSize);
+ map.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.DESC));
+ return map;
+ }
+
+ @NotNull
+ private SystemRequestDetails buildSystemRequestDetails() {
+ SystemRequestDetails retval = new SystemRequestDetails();
+ retval.setRequestPartitionId(myPartitionedUrls.get(myUrlIndex).getRequestPartitionId());
+ return retval;
+ }
+
+ @Override
+ public void open(ExecutionContext executionContext) throws ItemStreamException {
+ if (myBatchSize == null) {
+ myBatchSize = myDaoConfig.getExpungeBatchSize();
+ }
+ if (executionContext.containsKey(CURRENT_URL_INDEX)) {
+ myUrlIndex = Math.toIntExact(executionContext.getLong(CURRENT_URL_INDEX));
+ }
+ for (int index = 0; index < myPartitionedUrls.size(); ++index) {
+ String key = highKey(index);
+ if (executionContext.containsKey(key)) {
+ myThresholdHighByUrlIndex.put(index, new Date(executionContext.getLong(key)));
+ } else {
+ myThresholdHighByUrlIndex.put(index, myStartTime);
+ }
+ }
+ }
+
+ private static String highKey(int theIndex) {
+ return CURRENT_THRESHOLD_HIGH + "." + theIndex;
+ }
+
+ @Override
+ public void update(ExecutionContext executionContext) throws ItemStreamException {
+ executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex);
+ for (int index = 0; index < myPartitionedUrls.size(); ++index) {
+ Date date = myThresholdHighByUrlIndex.get(index);
+ if (date != null) {
+ executionContext.putLong(highKey(index), date.getTime());
+ }
+ }
+ }
+
+ @Override
+ public void close() throws ItemStreamException {
+ }
+}
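A sketch of assembling the three job parameters this reader documents. In this patch, submission actually goes through IDeleteExpungeJobSubmitter (see DeleteExpungeJobSubmitterImpl below); this only illustrates the parameter keys, which come from the JOB_PARAM_* constants above:

```java
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;

import java.util.Date;

public class DeleteExpungeJobParametersSketch {
	public static JobParameters build(String theRequestListJson) {
		// Keys match the reader's constants: "url-list", "batch-size", "start-time".
		// The batch-size value is optional and falls back to DaoConfig#getExpungeBatchSize.
		return new JobParametersBuilder()
			.addString("url-list", theRequestListJson)
			.addLong("batch-size", 100L)
			.addDate("start-time", new Date())
			.toJobParameters();
	}
}
```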
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/writer/SqlExecutorWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/writer/SqlExecutorWriter.java
new file mode 100644
index 00000000000..51a295ffb64
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/writer/SqlExecutorWriter.java
@@ -0,0 +1,67 @@
+package ca.uhn.fhir.jpa.batch.writer;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.annotation.BeforeStep;
+import org.springframework.batch.item.ItemWriter;
+
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+import javax.persistence.PersistenceContextType;
+import java.util.List;
+
+/**
+ * This Spring Batch writer accepts a list of SQL commands and executes them.
+ * The total number of entities updated or deleted is stored in the execution context
+ * with the key {@link #ENTITY_TOTAL_UPDATED_OR_DELETED}. The entire list is committed within a
+ * single transaction (provided by Spring Batch).
+ */
+public class SqlExecutorWriter implements ItemWriter<List<String>> {
+ private static final Logger ourLog = LoggerFactory.getLogger(SqlExecutorWriter.class);
+
+ public static final String ENTITY_TOTAL_UPDATED_OR_DELETED = "entity.total.updated-or-deleted";
+
+ @PersistenceContext(type = PersistenceContextType.TRANSACTION)
+ private EntityManager myEntityManager;
+ private Long totalUpdated = 0L;
+ private StepExecution myStepExecution;
+
+ @BeforeStep
+ public void setStepExecution(StepExecution stepExecution) {
+ myStepExecution = stepExecution;
+ }
+
+ @Override
+ public void write(List<? extends List<String>> theSqlLists) throws Exception {
+ for (List<String> sqlList : theSqlLists) {
+ ourLog.info("Executing {} sql commands", sqlList.size());
+ for (String sql : sqlList) {
+ ourLog.trace("Executing sql {}", sql);
+ totalUpdated += myEntityManager.createNativeQuery(sql).executeUpdate();
+ myStepExecution.getExecutionContext().putLong(ENTITY_TOTAL_UPDATED_OR_DELETED, totalUpdated);
+ }
+ }
+ ourLog.debug("{} records updated", totalUpdated);
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java
index a6bb4af289a..f442095c0e1 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java
@@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.bulk.export.job;
*/
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
-import ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor;
-import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
+import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
+import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
index 48c24a0d06d..e79a9acfdb2 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
@@ -61,6 +61,7 @@ import ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderUri;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictFinderService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
+import ca.uhn.fhir.jpa.delete.DeleteExpungeJobSubmitterImpl;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.graphql.JpaStorageServices;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
@@ -132,9 +133,11 @@ import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.validation.JpaResourceLoader;
import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
+import ca.uhn.fhir.rest.server.provider.DeleteExpungeProvider;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport;
import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -528,6 +531,18 @@ public abstract class BaseConfig {
return new BulkDataExportProvider();
}
+ @Bean
+ @Lazy
+ public IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter() {
+ return new DeleteExpungeJobSubmitterImpl();
+ }
+
+ @Bean
+ @Lazy
+ public DeleteExpungeProvider deleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) {
+ return new DeleteExpungeProvider(theFhirContext, theDeleteExpungeJobSubmitter);
+ }
+
@Bean
@Lazy
public IBulkDataImportSvc bulkDataImportSvc() {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index 9021c1a4ce5..381493d8997 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -607,10 +607,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
- boolean skipUpdatingTags = false;
- if (myConfig.isMassIngestionMode() && theEntity.isHasTags()) {
- skipUpdatingTags = true;
- }
+ boolean skipUpdatingTags = myConfig.isMassIngestionMode() && theEntity.isHasTags();
if (!skipUpdatingTags) {
Set<ResourceTag> allDefs = new HashSet<>();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index 379bf1becda..e3bbf194245 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -34,7 +34,6 @@ import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
-import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
@@ -56,6 +55,7 @@ import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
+import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
@@ -77,6 +77,7 @@ import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.SimplePreResourceShowDetails;
+import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.param.HasParam;
@@ -112,9 +113,10 @@ import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
-import org.springframework.data.domain.SliceImpl;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.annotation.Propagation;
@@ -132,6 +134,7 @@ import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
@@ -169,7 +172,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Autowired
private MatchUrlService myMatchUrlService;
@Autowired
- private DeleteExpungeService myDeleteExpungeService;
+ private IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter;
private IInstanceValidatorModule myInstanceValidator;
private String myResourceName;
@@ -516,12 +519,17 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
@Override
- public DeleteMethodOutcome deleteByUrl(String theUrl, RequestDetails theRequestDetails) {
+ public DeleteMethodOutcome deleteByUrl(String theUrl, RequestDetails theRequest) {
validateDeleteEnabled();
+ ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(theUrl);
- return myTransactionService.execute(theRequestDetails, tx -> {
+ if (resourceSearch.isDeleteExpunge()) {
+ return deleteExpunge(theUrl, theRequest);
+ }
+
+ return myTransactionService.execute(theRequest, tx -> {
DeleteConflictList deleteConflicts = new DeleteConflictList();
- DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequestDetails);
+ DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequest);
DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
return outcome;
});
@@ -540,8 +548,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Nonnull
private DeleteMethodOutcome doDeleteByUrl(String theUrl, DeleteConflictList deleteConflicts, RequestDetails theRequest) {
- RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(myResourceType);
- SearchParameterMap paramMap = myMatchUrlService.translateMatchUrl(theUrl, resourceDef);
+ ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(theUrl);
+ SearchParameterMap paramMap = resourceSearch.getSearchParameterMap();
paramMap.setLoadSynchronous(true);
Set<ResourcePersistentId> resourceIds = myMatchResourceUrlService.search(paramMap, myResourceType, theRequest);
@@ -552,19 +560,21 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
- if (paramMap.isDeleteExpunge()) {
- return deleteExpunge(theUrl, theRequest, resourceIds);
- } else {
- return deletePidList(theUrl, resourceIds, deleteConflicts, theRequest);
- }
+ return deletePidList(theUrl, resourceIds, deleteConflicts, theRequest);
}
- private DeleteMethodOutcome deleteExpunge(String theUrl, RequestDetails theTheRequest, Set theResourceIds) {
- if (!getConfig().isExpungeEnabled() || !getConfig().isDeleteExpungeEnabled()) {
- throw new MethodNotAllowedException("_expunge is not enabled on this server");
+ private DeleteMethodOutcome deleteExpunge(String theUrl, RequestDetails theRequest) {
+ if (!getConfig().canDeleteExpunge()) {
+ throw new MethodNotAllowedException("_expunge is not enabled on this server: " + getConfig().cannotDeleteExpungeReason());
}
- return myDeleteExpungeService.expungeByResourcePids(theUrl, myResourceName, new SliceImpl<>(ResourcePersistentId.toLongList(theResourceIds)), theTheRequest);
+ List<String> urlsToDeleteExpunge = Collections.singletonList(theUrl);
+ try {
+ JobExecution jobExecution = myDeleteExpungeJobSubmitter.submitJob(getConfig().getExpungeBatchSize(), theRequest, urlsToDeleteExpunge);
+ return new DeleteMethodOutcome(createInfoOperationOutcome("Delete job submitted with id " + jobExecution.getId()));
+ } catch (JobParametersInvalidException e) {
+ throw new InvalidRequestException("Invalid Delete Expunge Request: " + e.getMessage(), e);
+ }
}
@Nonnull
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
index 7fed32b7455..2e3c0c7c6f2 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
@@ -1145,10 +1145,7 @@ public abstract class BaseTransactionProcessor {
IBasePersistedResource updateOutcome = null;
if (updatedEntities.contains(nextOutcome.getEntity())) {
- boolean forceUpdateVersion = false;
- if (!theReferencesToAutoVersion.isEmpty()) {
- forceUpdateVersion = true;
- }
+ boolean forceUpdateVersion = !theReferencesToAutoVersion.isEmpty();
updateOutcome = jpaDao.updateInternal(theRequest, nextResource, true, forceUpdateVersion, nextOutcome.getEntity(), nextResource.getIdElement(), nextOutcome.getPreviousResource(), theTransactionDetails);
} else if (!nonUpdatedEntities.contains(nextOutcome.getId())) {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java
index e7bbf79a09c..ec8bdf794d6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java
@@ -25,9 +25,9 @@ import ca.uhn.fhir.jpa.dao.data.ISubscriptionTableDao;
import ca.uhn.fhir.jpa.entity.SubscriptionTable;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.model.dstu2.resource.Subscription;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/MatchResourceUrlService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/MatchResourceUrlService.java
index 857d6956243..44e223a27f2 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/MatchResourceUrlService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/MatchResourceUrlService.java
@@ -49,8 +49,6 @@ import javax.annotation.Nullable;
import java.util.Collections;
import java.util.Set;
-import static org.apache.commons.lang3.StringUtils.isNotBlank;
-
@Service
public class MatchResourceUrlService {
@Autowired
@@ -138,7 +136,7 @@ public class MatchResourceUrlService {
// Interceptor broadcast: JPA_PERFTRACE_INFO
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) {
StorageProcessingMessage message = new StorageProcessingMessage();
- message.setMessage("Processed conditional resource URL with " + retVal.size() + " result(s) in " + sw.toString());
+ message.setMessage("Processed conditional resource URL with " + retVal.size() + " result(s) in " + sw);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java
index add98b15d65..c618f59b91b 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java
@@ -26,8 +26,8 @@ import ca.uhn.fhir.jpa.dao.data.ISubscriptionTableDao;
import ca.uhn.fhir.jpa.entity.SubscriptionTable;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.dstu3.model.Subscription;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/DeleteExpungeService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/DeleteExpungeService.java
index 1695ff7c7c8..71d00b0ec76 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/DeleteExpungeService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/DeleteExpungeService.java
@@ -30,10 +30,10 @@ import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
-import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
@@ -55,6 +55,10 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
@Service
+/**
+ * DeleteExpunge is now performed using the {@link ca.uhn.fhir.jpa.delete.DeleteExpungeJobSubmitterImpl} Spring Batch job.
+ */
+@Deprecated
public class DeleteExpungeService {
private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeService.class);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java
index 4e64404cc05..1b89c3c9b7f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java
@@ -28,7 +28,6 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.cross.ResourceLookup;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
-import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.QueryChunker;
@@ -72,7 +71,6 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
-import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSubscriptionR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSubscriptionR4.java
index a0c7eab2a16..bd1e6f7376a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSubscriptionR4.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSubscriptionR4.java
@@ -26,8 +26,8 @@ import ca.uhn.fhir.jpa.dao.data.ISubscriptionTableDao;
import ca.uhn.fhir.jpa.entity.SubscriptionTable;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Subscription;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoSubscriptionR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoSubscriptionR5.java
index 4cc8b80ebb4..ff6776170dc 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoSubscriptionR5.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoSubscriptionR5.java
@@ -26,8 +26,8 @@ import ca.uhn.fhir.jpa.dao.data.ISubscriptionTableDao;
import ca.uhn.fhir.jpa.entity.SubscriptionTable;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.model.Subscription;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteExpungeJobSubmitterImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteExpungeJobSubmitterImpl.java
new file mode 100644
index 00000000000..bd4fad3a1a2
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteExpungeJobSubmitterImpl.java
@@ -0,0 +1,100 @@
+package ca.uhn.fhir.jpa.delete;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
+import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
+import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
+import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
+import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.JobParametersInvalidException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import javax.transaction.Transactional;
+import java.util.ArrayList;
+import java.util.List;
+
+public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter {
+ @Autowired
+ private IBatchJobSubmitter myBatchJobSubmitter;
+ @Autowired
+ @Qualifier(BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME)
+ private Job myDeleteExpungeJob;
+ @Autowired
+ FhirContext myFhirContext;
+ @Autowired
+ MatchUrlService myMatchUrlService;
+ @Autowired
+ IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
+ @Autowired
+ DaoConfig myDaoConfig;
+ @Autowired
+ IInterceptorBroadcaster myInterceptorBroadcaster;
+
+ @Override
+ @Transactional(Transactional.TxType.NEVER)
+ public JobExecution submitJob(Integer theBatchSize, RequestDetails theRequest, List<String> theUrlsToDeleteExpunge) throws JobParametersInvalidException {
+ List<RequestPartitionId> requestPartitionIds = requestPartitionIdsFromRequestAndUrls(theRequest, theUrlsToDeleteExpunge);
+ if (!myDaoConfig.canDeleteExpunge()) {
+ throw new ForbiddenOperationException("Delete Expunge not allowed: " + myDaoConfig.cannotDeleteExpungeReason());
+ }
+
+ for (String url : theUrlsToDeleteExpunge) {
+ HookParams params = new HookParams()
+ .add(RequestDetails.class, theRequest)
+ .addIfMatchesType(ServletRequestDetails.class, theRequest)
+ .add(String.class, url);
+ CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);
+ }
+
+ JobParameters jobParameters = DeleteExpungeJobConfig.buildJobParameters(theBatchSize, theUrlsToDeleteExpunge, requestPartitionIds);
+ return myBatchJobSubmitter.runJob(myDeleteExpungeJob, jobParameters);
+ }
+
+ /**
+ * This method will throw an exception if the user is not allowed to access the requested resource type on the partition determined by the request.
+ */
+ private List<RequestPartitionId> requestPartitionIdsFromRequestAndUrls(RequestDetails theRequest, List<String> theUrlsToDeleteExpunge) {
+ List<RequestPartitionId> retval = new ArrayList<>();
+ for (String url : theUrlsToDeleteExpunge) {
+ ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url);
+ RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequest, resourceSearch.getResourceName());
+ retval.add(requestPartitionId);
+ }
+ return retval;
+ }
+}
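The STORAGE_PRE_DELETE_EXPUNGE hook fired above for each URL gives interceptors a veto point before any batch work is queued. A hedged sketch of such an interceptor (the class name and policy are invented for illustration):

	@Interceptor
	public class DeleteExpungeGuard {
		@Hook(Pointcut.STORAGE_PRE_DELETE_EXPUNGE)
		public void preDeleteExpunge(RequestDetails theRequestDetails, String theUrl) {
			// Throwing from this hook aborts submitJob() before the job is launched
			if (theUrl.startsWith("AuditEvent")) {
				throw new ForbiddenOperationException("delete-expunge of AuditEvent resources is not permitted");
			}
		}
	}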
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobConfig.java
new file mode 100644
index 00000000000..54a398f0331
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobConfig.java
@@ -0,0 +1,139 @@
+package ca.uhn.fhir.jpa.delete.job;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener;
+import ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader;
+import ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter;
+import ca.uhn.fhir.jpa.delete.model.RequestListJson;
+import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
+import org.apache.commons.lang3.time.DateUtils;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.JobParameter;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.JobParametersValidator;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
+import org.springframework.batch.core.configuration.annotation.StepScope;
+import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Lazy;
+
+import javax.annotation.Nonnull;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.DELETE_EXPUNGE_JOB_NAME;
+
+/**
+ * Spring Batch job configuration. Contains all the plumbing necessary to run a
+ * Delete Expunge job.
+ */
+@Configuration
+public class DeleteExpungeJobConfig {
+ public static final String DELETE_EXPUNGE_URL_LIST_STEP_NAME = "delete-expunge-url-list-step";
+ private static final int MINUTES_IN_FUTURE_TO_DELETE_FROM = 1;
+
+ @Autowired
+ private StepBuilderFactory myStepBuilderFactory;
+ @Autowired
+ private JobBuilderFactory myJobBuilderFactory;
+
+ @Bean(name = DELETE_EXPUNGE_JOB_NAME)
+ @Lazy
+ public Job deleteExpungeJob(FhirContext theFhirContext, MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) throws Exception {
+ return myJobBuilderFactory.get(DELETE_EXPUNGE_JOB_NAME)
+ .validator(deleteExpungeJobParameterValidator(theFhirContext, theMatchUrlService, theDaoRegistry))
+ .start(deleteExpungeUrlListStep())
+ .build();
+ }
+
+ @Nonnull
+ public static JobParameters buildJobParameters(Integer theBatchSize, List<String> theUrlList, List<RequestPartitionId> theRequestPartitionIds) {
+ Map<String, JobParameter> map = new HashMap<>();
+ RequestListJson requestListJson = RequestListJson.fromUrlStringsAndRequestPartitionIds(theUrlList, theRequestPartitionIds);
+ map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(requestListJson.toString()));
+ map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), MINUTES_IN_FUTURE_TO_DELETE_FROM)));
+ if (theBatchSize != null) {
+ map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
+ }
+ JobParameters parameters = new JobParameters(map);
+ return parameters;
+ }
+
+ @Bean
+ public Step deleteExpungeUrlListStep() {
+ return myStepBuilderFactory.get(DELETE_EXPUNGE_URL_LIST_STEP_NAME)
+ .<List<Long>, List<String>>chunk(1)
+ .reader(reverseCronologicalBatchResourcePidReader())
+ .processor(deleteExpungeProcessor())
+ .writer(sqlExecutorWriter())
+ .listener(pidCountRecorderListener())
+ .listener(promotionListener())
+ .build();
+ }
+
+ @Bean
+ @StepScope
+ public PidReaderCounterListener pidCountRecorderListener() {
+ return new PidReaderCounterListener();
+ }
+
+ @Bean
+ @StepScope
+ public ReverseCronologicalBatchResourcePidReader reverseCronologicalBatchResourcePidReader() {
+ return new ReverseCronologicalBatchResourcePidReader();
+ }
+
+ @Bean
+ @StepScope
+ public DeleteExpungeProcessor deleteExpungeProcessor() {
+ return new DeleteExpungeProcessor();
+ }
+
+ @Bean
+ @StepScope
+ public SqlExecutorWriter sqlExecutorWriter() {
+ return new SqlExecutorWriter();
+ }
+
+ @Bean
+ public JobParametersValidator deleteExpungeJobParameterValidator(FhirContext theFhirContext, MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
+ return new DeleteExpungeJobParameterValidator(theMatchUrlService, theDaoRegistry);
+ }
+
+ @Bean
+ public ExecutionContextPromotionListener promotionListener() {
+ ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
+
+ listener.setKeys(new String[]{SqlExecutorWriter.ENTITY_TOTAL_UPDATED_OR_DELETED, PidReaderCounterListener.RESOURCE_TOTAL_PROCESSED});
+
+ return listener;
+ }
+}
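Assuming the wiring above, the static buildJobParameters helper can also be exercised directly; the URL, batch size, and partition below are illustrative only:

	List<String> urls = Collections.singletonList("Observation?status=cancelled");
	List<RequestPartitionId> partitions = Collections.singletonList(RequestPartitionId.defaultPartition());
	JobParameters parameters = DeleteExpungeJobConfig.buildJobParameters(500, urls, partitions);
	// Three keys are populated for the reader: the serialized request list, a start
	// time one minute in the future (so resources created after submission are not
	// picked up mid-run), and the optional batch size.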
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterValidator.java
new file mode 100644
index 00000000000..e94c44f9ca8
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeJobParameterValidator.java
@@ -0,0 +1,67 @@
+package ca.uhn.fhir.jpa.delete.job;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.delete.model.PartitionedUrl;
+import ca.uhn.fhir.jpa.delete.model.RequestListJson;
+import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
+import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
+import ca.uhn.fhir.rest.server.provider.ProviderConstants;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.JobParametersInvalidException;
+import org.springframework.batch.core.JobParametersValidator;
+
+import static ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST;
+
+/**
+ * This class will prevent a job from running if any of the provided URLs are not valid on this server.
+ */
+public class DeleteExpungeJobParameterValidator implements JobParametersValidator {
+ private final MatchUrlService myMatchUrlService;
+ private final DaoRegistry myDaoRegistry;
+
+ public DeleteExpungeJobParameterValidator(MatchUrlService theMatchUrlService, DaoRegistry theDaoRegistry) {
+ myMatchUrlService = theMatchUrlService;
+ myDaoRegistry = theDaoRegistry;
+ }
+
+ @Override
+ public void validate(JobParameters theJobParameters) throws JobParametersInvalidException {
+ if (theJobParameters == null) {
+ throw new JobParametersInvalidException("This job requires Parameters: [urlList]");
+ }
+
+ RequestListJson requestListJson = RequestListJson.fromJson(theJobParameters.getString(JOB_PARAM_REQUEST_LIST));
+ for (PartitionedUrl partitionedUrl : requestListJson.getPartitionedUrls()) {
+ String url = partitionedUrl.getUrl();
+ try {
+ ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(url);
+ String resourceName = resourceSearch.getResourceName();
+ if (!myDaoRegistry.isResourceTypeSupported(resourceName)) {
+ throw new JobParametersInvalidException("The resource type " + resourceName + " is not supported on this server.");
+ }
+ } catch (UnsupportedOperationException e) {
+ throw new JobParametersInvalidException("Failed to parse " + ProviderConstants.OPERATION_DELETE_EXPUNGE + " " + JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
+ }
+ }
+ }
+}
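A hedged sketch of the contract this validator enforces, assuming a server on which the Media resource type is not enabled (myMatchUrlService and myDaoRegistry stand in for wired beans):

	DeleteExpungeJobParameterValidator validator = new DeleteExpungeJobParameterValidator(myMatchUrlService, myDaoRegistry);
	JobParameters parameters = DeleteExpungeJobConfig.buildJobParameters(null,
		Collections.singletonList("Media?status=completed"),
		Collections.singletonList(RequestPartitionId.defaultPartition()));
	try {
		validator.validate(parameters);
	} catch (JobParametersInvalidException e) {
		// Expected: "The resource type Media is not supported on this server."
	}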
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeProcessor.java
new file mode 100644
index 00000000000..a06a5d38377
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/job/DeleteExpungeProcessor.java
@@ -0,0 +1,123 @@
+package ca.uhn.fhir.jpa.delete.job;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
+import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
+import ca.uhn.fhir.jpa.dao.expunge.ResourceForeignKey;
+import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
+import ca.uhn.fhir.jpa.dao.index.IdHelperService;
+import ca.uhn.fhir.jpa.model.entity.ResourceLink;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.item.ItemProcessor;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.Slice;
+import org.springframework.data.domain.SliceImpl;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Input: list of pids of resources to be deleted and expunged
+ * Output: list of sql statements to be executed
+ */
+public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<String>> {
+ private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeProcessor.class);
+
+ @Autowired
+ ResourceTableFKProvider myResourceTableFKProvider;
+ @Autowired
+ DaoConfig myDaoConfig;
+ @Autowired
+ IdHelperService myIdHelper;
+ @Autowired
+ IResourceLinkDao myResourceLinkDao;
+ @Autowired
+ PartitionRunner myPartitionRunner;
+
+ @Override
+ public List<String> process(List<Long> thePids) throws Exception {
+ validateOkToDeleteAndExpunge(new SliceImpl<>(thePids));
+
+ List<String> retval = new ArrayList<>();
+
+ String pidListString = thePids.toString().replace("[", "(").replace("]", ")");
+ List<ResourceForeignKey> resourceForeignKeys = myResourceTableFKProvider.getResourceForeignKeys();
+
+ for (ResourceForeignKey resourceForeignKey : resourceForeignKeys) {
+ retval.add(deleteRecordsByColumnSql(pidListString, resourceForeignKey));
+ }
+
+ // Lastly we need to delete records from the resource table all of these other tables link to:
+ ResourceForeignKey resourceTablePk = new ResourceForeignKey("HFJ_RESOURCE", "RES_ID");
+ retval.add(deleteRecordsByColumnSql(pidListString, resourceTablePk));
+ return retval;
+ }
+
+ public void validateOkToDeleteAndExpunge(Slice<Long> thePids) {
+ if (!myDaoConfig.isEnforceReferentialIntegrityOnDelete()) {
+ ourLog.info("Referential integrity on delete disabled. Skipping referential integrity check.");
+ return;
+ }
+
+ List<ResourceLink> conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
+ myPartitionRunner.runInPartitionedThreads(thePids, someTargetPids -> findResourceLinksWithTargetPidIn(thePids.getContent(), someTargetPids, conflictResourceLinks));
+
+ if (conflictResourceLinks.isEmpty()) {
+ return;
+ }
+
+ ResourceLink firstConflict = conflictResourceLinks.get(0);
+
+ //NB-GGG: We previously instantiated these ID values from firstConflict.getSourceResource().getIdDt(), but in a situation where we
+ //actually had to run delete conflict checks in multiple partitions, the executor service starts its own sessions on a per thread basis, and by the time
+ //we arrive here, those sessions are closed. So instead, we resolve them from PIDs, which are eagerly loaded.
+ String sourceResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getSourceResourcePid()).toVersionless().getValue();
+ String targetResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getTargetResourcePid()).toVersionless().getValue();
+
+ throw new InvalidRequestException("DELETE with _expunge=true failed. Unable to delete " +
+ targetResourceId + " because " + sourceResourceId + " refers to it via the path " + firstConflict.getSourcePath());
+ }
+
+ public void findResourceLinksWithTargetPidIn(List<Long> theAllTargetPids, List<Long> theSomeTargetPids, List<ResourceLink> theConflictResourceLinks) {
+ // We only need to find one conflict, so if we found one already in an earlier partition run, we can skip the rest of the searches
+ if (theConflictResourceLinks.isEmpty()) {
+ List<ResourceLink> conflictResourceLinks = myResourceLinkDao.findWithTargetPidIn(theSomeTargetPids).stream()
+ // Filter out resource links for which we are planning to delete the source.
+ // theAllTargetPids contains a list of all the pids we are planning to delete. So we only want
+ // to consider a link to be a conflict if the source of that link is not in theAllTargetPids.
+ .filter(link -> !theAllTargetPids.contains(link.getSourceResourcePid()))
+ .collect(Collectors.toList());
+
+ // We do this in two steps to avoid lock contention on this synchronized list
+ theConflictResourceLinks.addAll(conflictResourceLinks);
+ }
+ }
+
+ private String deleteRecordsByColumnSql(String thePidListString, ResourceForeignKey theResourceForeignKey) {
+ return "DELETE FROM " + theResourceForeignKey.table + " WHERE " + theResourceForeignKey.key + " IN " + thePidListString;
+ }
+}
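To make the input/output contract concrete, a sketch of what the processor emits for two resource pids. The exact tables come from ResourceTableFKProvider; HFJ_RES_LINK/SRC_RESOURCE_ID is one real example and the ellipsis elides the rest:

	// process() declares throws Exception, so call it from a context that handles that
	List<String> sql = deleteExpungeProcessor.process(Arrays.asList(101L, 102L));
	// sql now holds one DELETE per foreign-key table, ending with the resource table:
	//   DELETE FROM HFJ_RES_LINK WHERE SRC_RESOURCE_ID IN (101, 102)
	//   ...
	//   DELETE FROM HFJ_RESOURCE WHERE RES_ID IN (101, 102)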
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/PartitionedUrl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/PartitionedUrl.java
new file mode 100644
index 00000000000..a183773f400
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/PartitionedUrl.java
@@ -0,0 +1,37 @@
+package ca.uhn.fhir.jpa.delete.model;
+
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.model.api.IModelJson;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class PartitionedUrl implements IModelJson {
+ @JsonProperty("url")
+ private String myUrl;
+
+ @JsonProperty("requestPartitionId")
+ private RequestPartitionId myRequestPartitionId;
+
+ public PartitionedUrl() {
+ }
+
+ public PartitionedUrl(String theUrl, RequestPartitionId theRequestPartitionId) {
+ myUrl = theUrl;
+ myRequestPartitionId = theRequestPartitionId;
+ }
+
+ public String getUrl() {
+ return myUrl;
+ }
+
+ public void setUrl(String theUrl) {
+ myUrl = theUrl;
+ }
+
+ public RequestPartitionId getRequestPartitionId() {
+ return myRequestPartitionId;
+ }
+
+ public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) {
+ myRequestPartitionId = theRequestPartitionId;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/RequestListJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/RequestListJson.java
new file mode 100644
index 00000000000..4824091aa20
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/model/RequestListJson.java
@@ -0,0 +1,79 @@
+package ca.uhn.fhir.jpa.delete.model;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Serialize a list of URLs and partition ids so Spring Batch can store it as a String
+ */
+public class RequestListJson implements IModelJson {
+ static final ObjectMapper ourObjectMapper = new ObjectMapper();
+
+ @JsonProperty("partitionedUrls")
+ private List<PartitionedUrl> myPartitionedUrls;
+
+ public static RequestListJson fromUrlStringsAndRequestPartitionIds(List<String> theUrls, List<RequestPartitionId> theRequestPartitionIds) {
+ assert theUrls.size() == theRequestPartitionIds.size();
+
+ RequestListJson retval = new RequestListJson();
+ List<PartitionedUrl> partitionedUrls = new ArrayList<>();
+ for (int i = 0; i < theUrls.size(); ++i) {
+ partitionedUrls.add(new PartitionedUrl(theUrls.get(i), theRequestPartitionIds.get(i)));
+ }
+ retval.setPartitionedUrls(partitionedUrls);
+ return retval;
+ }
+
+ public static RequestListJson fromJson(String theJson) {
+ try {
+ return ourObjectMapper.readValue(theJson, RequestListJson.class);
+ } catch (JsonProcessingException e) {
+ throw new InternalErrorException("Failed to decode " + RequestListJson.class, e);
+ }
+ }
+
+ @Override
+ public String toString() {
+ try {
+ return ourObjectMapper.writeValueAsString(this);
+ } catch (JsonProcessingException e) {
+ throw new InvalidRequestException("Failed to encode " + RequestListJson.class, e);
+ }
+ }
+
+ public List<PartitionedUrl> getPartitionedUrls() {
+ return myPartitionedUrls;
+ }
+
+ public void setPartitionedUrls(List<PartitionedUrl> thePartitionedUrls) {
+ myPartitionedUrls = thePartitionedUrls;
+ }
+}
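A round-trip sketch of how the batch plumbing uses this class: the list is flattened to a single String job parameter at submission and rehydrated inside the reader (the URL and partition are illustrative):

	RequestListJson requestList = RequestListJson.fromUrlStringsAndRequestPartitionIds(
		Collections.singletonList("Patient?active=false"),
		Collections.singletonList(RequestPartitionId.defaultPartition()));
	String jobParameterValue = requestList.toString(); // JSON: {"partitionedUrls":[{"url":"Patient?active=false",...}]}
	RequestListJson roundTripped = RequestListJson.fromJson(jobParameterValue);
	assert "Patient?active=false".equals(roundTripped.getPartitionedUrls().get(0).getUrl());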
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java
index efdbb706efc..2d98a716a05 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.partition;
*/
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
@@ -109,7 +110,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
}
if (theRequest instanceof SystemRequestDetails) {
- requestPartitionId = getSystemRequestPartitionId(theRequest, nonPartitionableResource);
+ requestPartitionId = getSystemRequestPartitionId((SystemRequestDetails) theRequest, nonPartitionableResource);
// Interceptor call: STORAGE_PARTITION_IDENTIFY_READ
} else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) {
HookParams params = new HookParams()
@@ -122,22 +123,18 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
validateRequestPartitionNotNull(requestPartitionId, Pointcut.STORAGE_PARTITION_IDENTIFY_READ);
- return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest);
+ return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest, theResourceType);
}
return RequestPartitionId.allPartitions();
}
/**
- *
* For system requests, read partition from tenant ID if present, otherwise set to DEFAULT. If the resource they are attempting to partition
* is non-partitionable scream in the logs and set the partition to DEFAULT.
*
- * @param theRequest
- * @param theNonPartitionableResource
- * @return
*/
- private RequestPartitionId getSystemRequestPartitionId(RequestDetails theRequest, boolean theNonPartitionableResource) {
+ private RequestPartitionId getSystemRequestPartitionId(SystemRequestDetails theRequest, boolean theNonPartitionableResource) {
RequestPartitionId requestPartitionId;
requestPartitionId = getSystemRequestPartitionId(theRequest);
if (theNonPartitionableResource && !requestPartitionId.isDefaultPartition()) {
@@ -148,7 +145,7 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
/**
* Determine the partition for a System Call (defined by the fact that the request is of type SystemRequestDetails)
- *
+ *