diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiLocalizer.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiLocalizer.java
index 403ea58cc21..a7fd6d24edf 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiLocalizer.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiLocalizer.java
@@ -10,6 +10,8 @@ import java.util.concurrent.ConcurrentHashMap;
import static org.apache.commons.lang3.StringUtils.*;
+
+
/*
* #%L
* HAPI FHIR - Core Library
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
index db980fb5e9a..733caacdae1 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
@@ -1420,6 +1420,12 @@ public enum Pointcut implements IPointcut {
*
* ca.uhn.fhir.rest.api.server.storage.TransactionDetails - The outer transaction details object (since 5.0.0)
*
+ *
+ * Boolean - Whether this pointcut invocation was deferred or not (since 5.4.0)
+ *
+ *
+ * ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum - The timing at which the invocation of the interceptor took place. Options are ACTIVE and DEFERRED.
+ *
*
*
 * Hooks should return void.
@@ -1429,7 +1435,8 @@ public enum Pointcut implements IPointcut {
"org.hl7.fhir.instance.model.api.IBaseResource",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
- "ca.uhn.fhir.rest.api.server.storage.TransactionDetails"
+ "ca.uhn.fhir.rest.api.server.storage.TransactionDetails",
+ "ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum"
),
/**
@@ -1446,7 +1453,7 @@ public enum Pointcut implements IPointcut {
* Hooks may accept the following parameters:
*
* - org.hl7.fhir.instance.model.api.IBaseResource - The previous contents of the resource
- * - org.hl7.fhir.instance.model.api.IBaseResource - The proposed new new contents of the resource
+ * - org.hl7.fhir.instance.model.api.IBaseResource - The proposed new contents of the resource
* -
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
@@ -1463,6 +1470,9 @@ public enum Pointcut implements IPointcut {
*
-
* ca.uhn.fhir.rest.api.server.storage.TransactionDetails - The outer transaction details object (since 5.0.0)
*
+ * -
+ * ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum - The timing at which the invocation of the interceptor took place. Options are ACTIVE and DEFERRED.
+ *
*
*
 * Hooks should return void.
@@ -1473,7 +1483,8 @@ public enum Pointcut implements IPointcut {
"org.hl7.fhir.instance.model.api.IBaseResource",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
- "ca.uhn.fhir.rest.api.server.storage.TransactionDetails"
+ "ca.uhn.fhir.rest.api.server.storage.TransactionDetails",
+ "ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum"
),
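Hooks for these pointcuts can now declare the extra timing parameter alongside the existing ones. The following is a minimal sketch of a class-based interceptor that uses it; the class and method names are illustrative only and are not part of this change.

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;

@Interceptor
public class ExampleUpdateAuditInterceptor { // hypothetical example class

	@Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED)
	public void resourceUpdated(IBaseResource theOldResource, IBaseResource theNewResource, InterceptorInvocationTimingEnum theTiming) {
		// DEFERRED means the broadcast was queued while a transaction bundle was being
		// processed and replayed afterwards; ACTIVE means it fired immediately.
		if (theTiming == InterceptorInvocationTimingEnum.DEFERRED) {
			// e.g. skip work that only makes sense while the entry is being written
		}
	}
}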
@@ -1503,6 +1514,9 @@ public enum Pointcut implements IPointcut {
*
* ca.uhn.fhir.rest.api.server.storage.TransactionDetails - The outer transaction details object (since 5.0.0)
*
+ *
+ * ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum - The timing at which the invocation of the interceptor took place. Options are ACTIVE and DEFERRED.
+ *
*
*
 * Hooks should return void.
@@ -1512,9 +1526,53 @@ public enum Pointcut implements IPointcut {
"org.hl7.fhir.instance.model.api.IBaseResource",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
+ "ca.uhn.fhir.rest.api.server.storage.TransactionDetails",
+ "ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum"
+ ),
+
+ /**
+ * Storage Hook:
+ * Invoked after all entries in a transaction bundle have been executed
+ *
+ * Hooks will have access to the original bundle, as well as all the deferred interceptor broadcasts related to the
+ * processing of the transaction bundle
+ *
+ * Hooks may accept the following parameters:
+ *
+ * - org.hl7.fhir.instance.model.api.IBaseBundle - The transaction bundle that was processed
+ * -
+ * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+ * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+ * pulled out of the servlet request. Note that the bean
+ * properties are not all guaranteed to be populated, depending on how early during processing the
+ * exception occurred.
+ *
+ * -
+ * ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+ * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+ * pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
+ * only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
+ *
+ * -
+ * ca.uhn.fhir.rest.api.server.storage.TransactionDetails - The outer transaction details object (since 5.0.0)
+ *
+ * -
+ * ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts - A collection of pointcut invocations and their parameters which were deferred.
+ *
+ *
+ *
+ * Hooks should return void.
+ *
+ */
+ STORAGE_TRANSACTION_PROCESSED(void.class,
+ "org.hl7.fhir.instance.model.api.IBaseBundle",
+ "ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts",
+ "ca.uhn.fhir.rest.api.server.RequestDetails",
+ "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
"ca.uhn.fhir.rest.api.server.storage.TransactionDetails"
),
+
/**
* Storage Hook:
* Invoked when a resource delete operation is about to fail due to referential integrity checks. Intended for use with {@literal ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor}.
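As a rough sketch of how the new STORAGE_TRANSACTION_PROCESSED pointcut might be consumed (the interceptor name below is illustrative, not part of this change), a hook can inspect the broadcasts that were deferred while the transaction bundle was being processed:

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts;
import com.google.common.collect.ListMultimap;
import org.hl7.fhir.instance.model.api.IBaseBundle;

@Interceptor
public class ExampleTransactionSummaryInterceptor { // hypothetical example class

	@Hook(Pointcut.STORAGE_TRANSACTION_PROCESSED)
	public void transactionProcessed(IBaseBundle theBundle, DeferredInterceptorBroadcasts theDeferredBroadcasts) {
		ListMultimap<Pointcut, HookParams> broadcasts = theDeferredBroadcasts.getDeferredInterceptorBroadcasts();
		// One entry per create/update/delete broadcast that was deferred during the transaction
		int deferredInvocationCount = broadcasts.size();
	}
}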
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/InterceptorInvocationTimingEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/InterceptorInvocationTimingEnum.java
new file mode 100644
index 00000000000..044f65656bd
--- /dev/null
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/InterceptorInvocationTimingEnum.java
@@ -0,0 +1,30 @@
+package ca.uhn.fhir.rest.api;
+
+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+/**
+ * During the invocation of certain pointcuts, it is important to know whether they are being executed in
+ * an active or deferred fashion. This enum allows hooks to see how they were invoked.
+ */
+public enum InterceptorInvocationTimingEnum {
+ ACTIVE,
+ DEFERRED
+}
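A minimal sketch of reading this enum from an anonymous interceptor (the registration code below is illustrative; it assumes the existing IInterceptorService.registerAnonymousInterceptor and HookParams.get APIs used elsewhere in this changeset):

import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;

public class ExampleTimingRegistration { // hypothetical example class

	public void register(IInterceptorService theInterceptorService) {
		theInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, (thePointcut, theParams) -> {
			IBaseResource created = theParams.get(IBaseResource.class);
			InterceptorInvocationTimingEnum timing = theParams.get(InterceptorInvocationTimingEnum.class);
			// DEFERRED indicates the hook is being replayed after the enclosing transaction completed
		});
	}
}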
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java
index 657c3f23cc4..39c83e204b8 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java
@@ -403,7 +403,6 @@ public class BundleBuilder {
public void conditional(String theConditionalUrl) {
myUrl.setValueAsString(theConditionalUrl);
}
-
}
public class CreateBuilder {
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java
index d48969ae5ca..f53af2cd4ca 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java
@@ -1,6 +1,10 @@
package ca.uhn.fhir.util;
-import ca.uhn.fhir.context.*;
+import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
+import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
+import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
@@ -9,15 +13,23 @@ import ca.uhn.fhir.util.bundle.BundleEntryParts;
import ca.uhn.fhir.util.bundle.EntryListAccumulator;
import ca.uhn.fhir.util.bundle.ModifiableBundleEntry;
import org.apache.commons.lang3.tuple.Pair;
-import org.hl7.fhir.instance.model.api.*;
+import org.hl7.fhir.instance.model.api.IBase;
+import org.hl7.fhir.instance.model.api.IBaseBinary;
+import org.hl7.fhir.instance.model.api.IBaseBundle;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IPrimitiveType;
import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashSet;
import java.util.List;
+import java.util.Map;
import java.util.Objects;
+import java.util.Set;
import java.util.function.Consumer;
+import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
-
/*
* #%L
* HAPI FHIR - Core Library
@@ -42,6 +54,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* Fetch resources from a bundle
*/
public class BundleUtil {
+ private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BundleUtil.class);
+
/**
 * @return Returns null if the link isn't found or has no value
@@ -170,16 +184,200 @@ public class BundleUtil {
return entryListAccumulator.getList();
}
+ private static final int WHITE = 1;
+ private static final int GRAY = 2;
+ private static final int BLACK = 3;
+ /**
+ * Function which will do an in-place sort of a bundle's entries, to the correct processing order, which is:
+ * 1. Deletes
+ * 2. Creates
+ * 3. Updates
+ *
+ * Furthermore, within these operation types, the entries will be sorted based on the order in which they should be processed
+ * e.g. if you have 2 CREATEs, one for a Patient, and one for an Observation which has this Patient as its Subject,
+ * the patient will come first, then the observation.
+ *
+ * In cases of there being a cyclic dependency (e.g. Organization/1 is partOf Organization/2 and Organization/2 is partOf Organization/1)
+ * this function will throw an IllegalStateException.
+ *
+ * @param theContext The FhirContext.
+ * @param theBundle The {@link IBaseBundle} which contains the entries you would like sorted into processing order.
+ */
+ public static void sortEntriesIntoProcessingOrder(FhirContext theContext, IBaseBundle theBundle) throws IllegalStateException {
+ Map<BundleEntryParts, IBase> partsToIBaseMap = getPartsToIBaseMap(theContext, theBundle);
+ LinkedHashSet<IBase> retVal = new LinkedHashSet<>();
+
+ //Get all deletions.
+ LinkedHashSet<IBase> deleteParts = sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.DELETE, partsToIBaseMap);
+ validatePartsNotNull(deleteParts);
+ retVal.addAll(deleteParts);
+
+ //Get all Creations
+ LinkedHashSet<IBase> createParts = sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.POST, partsToIBaseMap);
+ validatePartsNotNull(createParts);
+ retVal.addAll(createParts);
+
+ // Get all Updates
+ LinkedHashSet<IBase> updateParts = sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.PUT, partsToIBaseMap);
+ validatePartsNotNull(updateParts);
+ retVal.addAll(updateParts);
+
+ //Once we are done adding all DELETE, POST, PUT operations, add everything else.
+ //Since this is a set, it will just fail to add already-added operations.
+ retVal.addAll(partsToIBaseMap.values());
+
+ //Blow away the entries and reset them in the right order.
+ TerserUtil.clearField(theContext, "entry", theBundle);
+ TerserUtil.setField(theContext, "entry", theBundle, retVal.toArray(new IBase[0]));
+ }
+
+ private static void validatePartsNotNull(LinkedHashSet<IBase> theDeleteParts) {
+ if (theDeleteParts == null) {
+ throw new IllegalStateException("This transaction contains a cycle, so it cannot be sorted.");
+ }
+ }
+
+ private static LinkedHashSet<IBase> sortEntriesOfTypeIntoProcessingOrder(FhirContext theContext, RequestTypeEnum theRequestTypeEnum, Map<BundleEntryParts, IBase> thePartsToIBaseMap) {
+ SortLegality legality = new SortLegality();
+ HashMap<String, Integer> color = new HashMap<>();
+ HashMap<String, List<String>> adjList = new HashMap<>();
+ List<String> topologicalOrder = new ArrayList<>();
+ Set<BundleEntryParts> bundleEntryParts = thePartsToIBaseMap.keySet().stream().filter(part -> part.getRequestType().equals(theRequestTypeEnum)).collect(Collectors.toSet());
+ HashMap<String, BundleEntryParts> resourceIdToBundleEntryMap = new HashMap<>();
+
+ for (BundleEntryParts bundleEntryPart : bundleEntryParts) {
+ IBaseResource resource = bundleEntryPart.getResource();
+ if (resource != null) {
+ String resourceId = resource.getIdElement().toVersionless().toString();
+ resourceIdToBundleEntryMap.put(resourceId, bundleEntryPart);
+ if (resourceId == null) {
+ if (bundleEntryPart.getFullUrl() != null) {
+ resourceId = bundleEntryPart.getFullUrl();
+ }
+ }
+
+ color.put(resourceId, WHITE);
+ }
+ }
+
+ for (BundleEntryParts bundleEntryPart : bundleEntryParts) {
+ IBaseResource resource = bundleEntryPart.getResource();
+ if (resource != null) {
+ String resourceId = resource.getIdElement().toVersionless().toString();
+ resourceIdToBundleEntryMap.put(resourceId, bundleEntryPart);
+ if (resourceId == null) {
+ if (bundleEntryPart.getFullUrl() != null) {
+ resourceId = bundleEntryPart.getFullUrl();
+ }
+ }
+ List<ResourceReferenceInfo> allResourceReferences = theContext.newTerser().getAllResourceReferences(resource);
+ String finalResourceId = resourceId;
+ allResourceReferences
+ .forEach(refInfo -> {
+ String referencedResourceId = refInfo.getResourceReference().getReferenceElement().toVersionless().getValue();
+ if (color.containsKey(referencedResourceId)) {
+ if (!adjList.containsKey(finalResourceId)) {
+ adjList.put(finalResourceId, new ArrayList<>());
+ }
+ adjList.get(finalResourceId).add(referencedResourceId);
+ }
+ });
+ }
+ }
+
+ for (Map.Entry<String, Integer> entry : color.entrySet()) {
+ if (entry.getValue() == WHITE) {
+ depthFirstSearch(entry.getKey(), color, adjList, topologicalOrder, legality);
+ }
+ }
+
+ if (legality.isLegal()) {
+ if (ourLog.isDebugEnabled()) {
+ ourLog.debug("Topological order is: {}", String.join(",", topologicalOrder));
+ }
+
+ LinkedHashSet<IBase> orderedEntries = new LinkedHashSet<>();
+ for (int i = 0; i < topologicalOrder.size(); i++) {
+ BundleEntryParts bep;
+ if (theRequestTypeEnum.equals(RequestTypeEnum.DELETE)) {
+ int index = topologicalOrder.size() - i - 1;
+ bep = resourceIdToBundleEntryMap.get(topologicalOrder.get(index));
+ } else {
+ bep = resourceIdToBundleEntryMap.get(topologicalOrder.get(i));
+ }
+ IBase base = thePartsToIBaseMap.get(bep);
+ orderedEntries.add(base);
+ }
+
+ return orderedEntries;
+
+ } else {
+ return null;
+ }
+ }
+
+ private static void depthFirstSearch(String theResourceId, HashMap<String, Integer> theResourceIdToColor, HashMap<String, List<String>> theAdjList, List<String> theTopologicalOrder, SortLegality theLegality) {
+
+ if (!theLegality.isLegal()) {
+ ourLog.debug("Found a cycle while trying to sort bundle entries. This bundle is not sortable.");
+ return;
+ }
+
+ //We are currently recursing over this node (gray)
+ theResourceIdToColor.put(theResourceId, GRAY);
+
+ for (String neighbourResourceId : theAdjList.getOrDefault(theResourceId, new ArrayList<>())) {
+ if (theResourceIdToColor.get(neighbourResourceId) == WHITE) {
+ depthFirstSearch(neighbourResourceId, theResourceIdToColor, theAdjList, theTopologicalOrder, theLegality);
+ } else if (theResourceIdToColor.get(neighbourResourceId) == GRAY) {
+ theLegality.setLegal(false);
+ return;
+ }
+ }
+ //Mark the node as black
+ theResourceIdToColor.put(theResourceId, BLACK);
+ theTopologicalOrder.add(theResourceId);
+ }
+
+ private static Map<BundleEntryParts, IBase> getPartsToIBaseMap(FhirContext theContext, IBaseBundle theBundle) {
+ RuntimeResourceDefinition bundleDef = theContext.getResourceDefinition(theBundle);
+ BaseRuntimeChildDefinition entryChildDef = bundleDef.getChildByName("entry");
+ List<IBase> entries = entryChildDef.getAccessor().getValues(theBundle);
+
+ BaseRuntimeElementCompositeDefinition<?> entryChildContentsDef = (BaseRuntimeElementCompositeDefinition<?>) entryChildDef.getChildByName("entry");
+ BaseRuntimeChildDefinition fullUrlChildDef = entryChildContentsDef.getChildByName("fullUrl");
+ BaseRuntimeChildDefinition resourceChildDef = entryChildContentsDef.getChildByName("resource");
+ BaseRuntimeChildDefinition requestChildDef = entryChildContentsDef.getChildByName("request");
+ BaseRuntimeElementCompositeDefinition<?> requestChildContentsDef = (BaseRuntimeElementCompositeDefinition<?>) requestChildDef.getChildByName("request");
+ BaseRuntimeChildDefinition requestUrlChildDef = requestChildContentsDef.getChildByName("url");
+ BaseRuntimeChildDefinition requestIfNoneExistChildDef = requestChildContentsDef.getChildByName("ifNoneExist");
+ BaseRuntimeChildDefinition methodChildDef = requestChildContentsDef.getChildByName("method");
+ Map<BundleEntryParts, IBase> map = new HashMap<>();
+ for (IBase nextEntry : entries) {
+ BundleEntryParts parts = getBundleEntryParts(fullUrlChildDef, resourceChildDef, requestChildDef, requestUrlChildDef, requestIfNoneExistChildDef, methodChildDef, nextEntry);
+ /*
+ * All 3 might be null - That's ok because we still want to know the
+ * order in the original bundle.
+ */
+ map.put(parts, nextEntry);
+ }
+ return map;
+ }
+
+ /**
+ * Given a bundle, and a consumer, apply the consumer to each entry in the bundle.
+ * @param theContext The FHIR Context
+ * @param theBundle The bundle to have its entries processed.
+ * @param theProcessor a {@link Consumer} which will operate on all the entries of a bundle.
+ */
public static void processEntries(FhirContext theContext, IBaseBundle theBundle, Consumer theProcessor) {
RuntimeResourceDefinition bundleDef = theContext.getResourceDefinition(theBundle);
BaseRuntimeChildDefinition entryChildDef = bundleDef.getChildByName("entry");
List<IBase> entries = entryChildDef.getAccessor().getValues(theBundle);
BaseRuntimeElementCompositeDefinition<?> entryChildContentsDef = (BaseRuntimeElementCompositeDefinition<?>) entryChildDef.getChildByName("entry");
-
BaseRuntimeChildDefinition fullUrlChildDef = entryChildContentsDef.getChildByName("fullUrl");
-
BaseRuntimeChildDefinition resourceChildDef = entryChildContentsDef.getChildByName("resource");
BaseRuntimeChildDefinition requestChildDef = entryChildContentsDef.getChildByName("request");
BaseRuntimeElementCompositeDefinition<?> requestChildContentsDef = (BaseRuntimeElementCompositeDefinition<?>) requestChildDef.getChildByName("request");
@@ -188,57 +386,62 @@ public class BundleUtil {
BaseRuntimeChildDefinition methodChildDef = requestChildContentsDef.getChildByName("method");
for (IBase nextEntry : entries) {
- IBaseResource resource = null;
- String url = null;
- RequestTypeEnum requestType = null;
- String conditionalUrl = null;
- String fullUrl = fullUrlChildDef
- .getAccessor()
- .getFirstValueOrNull(nextEntry)
- .map(t->((IPrimitiveType<?>)t).getValueAsString())
- .orElse(null);
-
- for (IBase nextResource : resourceChildDef.getAccessor().getValues(nextEntry)) {
- resource = (IBaseResource) nextResource;
- }
- for (IBase nextRequest : requestChildDef.getAccessor().getValues(nextEntry)) {
- for (IBase nextUrl : requestUrlChildDef.getAccessor().getValues(nextRequest)) {
- url = ((IPrimitiveType<?>) nextUrl).getValueAsString();
- }
- for (IBase nextMethod : methodChildDef.getAccessor().getValues(nextRequest)) {
- String methodString = ((IPrimitiveType<?>) nextMethod).getValueAsString();
- if (isNotBlank(methodString)) {
- requestType = RequestTypeEnum.valueOf(methodString);
- }
- }
-
- if (requestType != null) {
- //noinspection EnumSwitchStatementWhichMissesCases
- switch (requestType) {
- case PUT:
- conditionalUrl = url != null && url.contains("?") ? url : null;
- break;
- case POST:
- List<IBase> ifNoneExistReps = requestIfNoneExistChildDef.getAccessor().getValues(nextRequest);
- if (ifNoneExistReps.size() > 0) {
- IPrimitiveType<?> ifNoneExist = (IPrimitiveType<?>) ifNoneExistReps.get(0);
- conditionalUrl = ifNoneExist.getValueAsString();
- }
- break;
- }
- }
- }
-
+ BundleEntryParts parts = getBundleEntryParts(fullUrlChildDef, resourceChildDef, requestChildDef, requestUrlChildDef, requestIfNoneExistChildDef, methodChildDef, nextEntry);
/*
* All 3 might be null - That's ok because we still want to know the
* order in the original bundle.
*/
BundleEntryMutator mutator = new BundleEntryMutator(theContext, nextEntry, requestChildDef, requestChildContentsDef, entryChildContentsDef);
- ModifiableBundleEntry entry = new ModifiableBundleEntry(new BundleEntryParts(fullUrl, requestType, url, resource, conditionalUrl), mutator);
+ ModifiableBundleEntry entry = new ModifiableBundleEntry(parts, mutator);
theProcessor.accept(entry);
}
}
+ private static BundleEntryParts getBundleEntryParts(BaseRuntimeChildDefinition fullUrlChildDef, BaseRuntimeChildDefinition resourceChildDef, BaseRuntimeChildDefinition requestChildDef, BaseRuntimeChildDefinition requestUrlChildDef, BaseRuntimeChildDefinition requestIfNoneExistChildDef, BaseRuntimeChildDefinition methodChildDef, IBase nextEntry) {
+ IBaseResource resource = null;
+ String url = null;
+ RequestTypeEnum requestType = null;
+ String conditionalUrl = null;
+ String fullUrl = fullUrlChildDef
+ .getAccessor()
+ .getFirstValueOrNull(nextEntry)
+ .map(t->((IPrimitiveType<?>)t).getValueAsString())
+ .orElse(null);
+
+ for (IBase nextResource : resourceChildDef.getAccessor().getValues(nextEntry)) {
+ resource = (IBaseResource) nextResource;
+ }
+ for (IBase nextRequest : requestChildDef.getAccessor().getValues(nextEntry)) {
+ for (IBase nextUrl : requestUrlChildDef.getAccessor().getValues(nextRequest)) {
+ url = ((IPrimitiveType<?>) nextUrl).getValueAsString();
+ }
+ for (IBase nextMethod : methodChildDef.getAccessor().getValues(nextRequest)) {
+ String methodString = ((IPrimitiveType<?>) nextMethod).getValueAsString();
+ if (isNotBlank(methodString)) {
+ requestType = RequestTypeEnum.valueOf(methodString);
+ }
+ }
+
+ if (requestType != null) {
+ //noinspection EnumSwitchStatementWhichMissesCases
+ switch (requestType) {
+ case PUT:
+ conditionalUrl = url != null && url.contains("?") ? url : null;
+ break;
+ case POST:
+ List<IBase> ifNoneExistReps = requestIfNoneExistChildDef.getAccessor().getValues(nextRequest);
+ if (ifNoneExistReps.size() > 0) {
+ IPrimitiveType<?> ifNoneExist = (IPrimitiveType<?>) ifNoneExistReps.get(0);
+ conditionalUrl = ifNoneExist.getValueAsString();
+ }
+ break;
+ }
+ }
+ }
+ BundleEntryParts parts = new BundleEntryParts(fullUrl, requestType, url, resource, conditionalUrl);
+ return parts;
+ }
+
/**
* Extract all of the resources from a given bundle
*/
@@ -290,4 +493,20 @@ public class BundleUtil {
}
return isPatch;
}
+
+ private static class SortLegality {
+ private boolean myIsLegal;
+
+ SortLegality() {
+ this.myIsLegal = true;
+ }
+ private void setLegal(boolean theLegal) {
+ myIsLegal = theLegal;
+ }
+
+ public boolean isLegal() {
+ return myIsLegal;
+ }
+ }
+
}
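A short usage sketch for the new sorter (the bundle contents below are illustrative): entries are reordered in place so that deletes come first, then creates in dependency order, then updates, and a bundle containing a reference cycle is rejected with an IllegalStateException.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleUtil;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;

public class ExampleBundleSort { // hypothetical example class

	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		Bundle bundle = new Bundle();

		// This Observation references a Patient that is created later in the bundle
		Observation obs = new Observation();
		obs.setId("Observation/O1");
		obs.setSubject(new Reference("Patient/P1"));
		bundle.addEntry().setResource(obs).getRequest().setMethod(Bundle.HTTPVerb.POST).setUrl("Observation");

		Patient patient = new Patient();
		patient.setId("Patient/P1");
		bundle.addEntry().setResource(patient).getRequest().setMethod(Bundle.HTTPVerb.POST).setUrl("Patient");

		try {
			// In-place sort: the Patient create now precedes the Observation that references it
			BundleUtil.sortEntriesIntoProcessingOrder(ctx, bundle);
		} catch (IllegalStateException e) {
			// Thrown when the bundle contains a reference cycle and cannot be ordered
		}
	}
}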
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryParts.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryParts.java
index 5d698cec7b3..7bc9ecfa456 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryParts.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryParts.java
@@ -58,5 +58,4 @@ public class BundleEntryParts {
public String getUrl() {
return myUrl;
}
-
}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/interceptor/executor/InterceptorServiceTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/interceptor/executor/InterceptorServiceTest.java
index 8799cd1608b..4eb9de9fafe 100644
--- a/hapi-fhir-base/src/test/java/ca/uhn/fhir/interceptor/executor/InterceptorServiceTest.java
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/interceptor/executor/InterceptorServiceTest.java
@@ -455,11 +455,12 @@ public class InterceptorServiceTest {
params.add(String.class, "D");
params.add(String.class, "E");
params.add(String.class, "F");
+ params.add(String.class, "G");
try {
svc.haveAppropriateParams(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);
fail();
} catch (IllegalArgumentException e) {
- assertEquals("Wrong number of params for pointcut STORAGE_PRECOMMIT_RESOURCE_UPDATED - Wanted ca.uhn.fhir.rest.api.server.RequestDetails,ca.uhn.fhir.rest.api.server.storage.TransactionDetails,ca.uhn.fhir.rest.server.servlet.ServletRequestDetails,org.hl7.fhir.instance.model.api.IBaseResource,org.hl7.fhir.instance.model.api.IBaseResource but found [String, String, String, String, String, String]", e.getMessage());
+ assertEquals("Wrong number of params for pointcut STORAGE_PRECOMMIT_RESOURCE_UPDATED - Wanted ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum,ca.uhn.fhir.rest.api.server.RequestDetails,ca.uhn.fhir.rest.api.server.storage.TransactionDetails,ca.uhn.fhir.rest.server.servlet.ServletRequestDetails,org.hl7.fhir.instance.model.api.IBaseResource,org.hl7.fhir.instance.model.api.IBaseResource but found [String, String, String, String, String, String, String]", e.getMessage());
}
}
@@ -474,6 +475,7 @@ public class InterceptorServiceTest {
params.add((Class) String.class, 3);
params.add((Class) String.class, 4);
params.add((Class) String.class, 5);
+ params.add((Class) String.class, 6);
try {
svc.haveAppropriateParams(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);
fail();
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2534-new-pointcut.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2534-new-pointcut.yaml
new file mode 100644
index 00000000000..cc4e0776fe5
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2534-new-pointcut.yaml
@@ -0,0 +1,4 @@
+---
+type: add
+issue: 2534
+title: "Add new pointcut `STORAGE_TRANSACTION_PROCESSED`, which fires after all operations in a transaction have executed."
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index 02ef443c876..005320e8e67 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -70,6 +70,7 @@ import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
@@ -1364,7 +1365,8 @@ public abstract class BaseHapiFhirDao extends BaseStora
.add(IBaseResource.class, theResource)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
- .add(TransactionDetails.class, theTransactionDetails);
+ .add(TransactionDetails.class, theTransactionDetails)
+ .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED));
doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, hookParams);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index 2d4661baf01..cf768c557d7 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -54,6 +54,7 @@ import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
+import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
@@ -376,7 +377,8 @@ public abstract class BaseHapiFhirResourceDao extends B
.add(IBaseResource.class, theResource)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
- .add(TransactionDetails.class, theTransactionDetails);
+ .add(TransactionDetails.class, theTransactionDetails)
+ .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED));
doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, hookParams);
}
@@ -483,13 +485,11 @@ public abstract class BaseHapiFhirResourceDao extends B
.add(IBaseResource.class, resourceToDelete)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
- .add(TransactionDetails.class, theTransactionDetails);
+ .add(TransactionDetails.class, theTransactionDetails)
+ .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED));
- if (theTransactionDetails.isAcceptingDeferredInterceptorBroadcasts()) {
- theTransactionDetails.addDeferredInterceptorBroadcast(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
- } else {
- doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
- }
+
+ doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, resourceToDelete).setCreated(true);
@@ -597,7 +597,8 @@ public abstract class BaseHapiFhirResourceDao extends B
.add(IBaseResource.class, resourceToDelete)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
- .add(TransactionDetails.class, transactionDetails);
+ .add(TransactionDetails.class, transactionDetails)
+ .add(InterceptorInvocationTimingEnum.class, transactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED));
doCallHooks(transactionDetails, theRequest, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
}
});
@@ -681,16 +682,24 @@ public abstract class BaseHapiFhirResourceDao extends B
myEntityManager.merge(theEntity);
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
- // Interceptor call: STORAGE_PRESTORAGE_RESOURCE_UPDATED
IBaseResource newVersion = toResource(theEntity, false);
- HookParams params = new HookParams()
+ HookParams preStorageParams = new HookParams()
.add(IBaseResource.class, oldVersion)
.add(IBaseResource.class, newVersion)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(TransactionDetails.class, theTransactionDetails);
- myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, params);
- myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);
+ myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, preStorageParams);
+
+ // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
+ HookParams preCommitParams = new HookParams()
+ .add(IBaseResource.class, oldVersion)
+ .add(IBaseResource.class, newVersion)
+ .add(RequestDetails.class, theRequestDetails)
+ .addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
+ .add(TransactionDetails.class, theTransactionDetails)
+ .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED));
+ myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, preCommitParams);
}
@@ -720,14 +729,23 @@ public abstract class BaseHapiFhirResourceDao extends B
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
IBaseResource newVersion = toResource(theEntity, false);
- HookParams params = new HookParams()
+ HookParams preStorageParams = new HookParams()
.add(IBaseResource.class, oldVersion)
.add(IBaseResource.class, newVersion)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(TransactionDetails.class, theTransactionDetails);
- myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, params);
- myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);
+ myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, preStorageParams);
+
+ HookParams preCommitParams = new HookParams()
+ .add(IBaseResource.class, oldVersion)
+ .add(IBaseResource.class, newVersion)
+ .add(RequestDetails.class, theRequestDetails)
+ .addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
+ .add(TransactionDetails.class, theTransactionDetails)
+ .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED));
+
+ myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, preCommitParams);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
index 73a77fc629d..859b6f90c65 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
@@ -49,6 +49,7 @@ import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.param.ParameterUtil;
@@ -1005,25 +1006,16 @@ public abstract class BaseTransactionProcessor {
flushSession(theIdToPersistedOutcome);
theTransactionStopWatch.endCurrentTask();
- if (conditionalRequestUrls.size() > 0) {
- theTransactionStopWatch.startTask("Check for conflicts in conditional resources");
- }
+
+
/*
* Double check we didn't allow any duplicates we shouldn't have
*/
- for (Map.Entry<String, Class<? extends IBaseResource>> nextEntry : conditionalRequestUrls.entrySet()) {
- String matchUrl = nextEntry.getKey();
- Class<? extends IBaseResource> resType = nextEntry.getValue();
- if (isNotBlank(matchUrl)) {
- Set<ResourcePersistentId> val = myMatchResourceUrlService.processMatchUrl(matchUrl, resType, theRequest);
- if (val.size() > 1) {
- throw new InvalidRequestException(
- "Unable to process " + theActionName + " - Request would cause multiple resources to match URL: \"" + matchUrl + "\". Does transaction request contain duplicates?");
- }
- }
+ if (conditionalRequestUrls.size() > 0) {
+ theTransactionStopWatch.startTask("Check for conflicts in conditional resources");
}
-
+ validateNoDuplicates(theRequest, theActionName, conditionalRequestUrls);
theTransactionStopWatch.endCurrentTask();
for (IIdType next : theAllIds) {
@@ -1044,6 +1036,19 @@ public abstract class BaseTransactionProcessor {
JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, nextPointcut, nextParams);
}
+ DeferredInterceptorBroadcasts deferredInterceptorBroadcasts = new DeferredInterceptorBroadcasts(deferredBroadcastEvents);
+ HookParams params = new HookParams()
+ .add(RequestDetails.class, theRequest)
+ .addIfMatchesType(ServletRequestDetails.class, theRequest)
+ .add(DeferredInterceptorBroadcasts.class, deferredInterceptorBroadcasts)
+ .add(TransactionDetails.class, theTransactionDetails)
+ .add(IBaseBundle.class, theResponse);
+ JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_TRANSACTION_PROCESSED, params);
+
+ theTransactionDetails.deferredBroadcastProcessingFinished();
+
+ //finishedCallingDeferredInterceptorBroadcasts
+
return entriesToProcess;
} finally {
@@ -1053,6 +1058,20 @@ public abstract class BaseTransactionProcessor {
}
}
+ private void validateNoDuplicates(RequestDetails theRequest, String theActionName, Map<String, Class<? extends IBaseResource>> conditionalRequestUrls) {
+ for (Map.Entry<String, Class<? extends IBaseResource>> nextEntry : conditionalRequestUrls.entrySet()) {
+ String matchUrl = nextEntry.getKey();
+ Class<? extends IBaseResource> resType = nextEntry.getValue();
+ if (isNotBlank(matchUrl)) {
+ Set<ResourcePersistentId> val = myMatchResourceUrlService.processMatchUrl(matchUrl, resType, theRequest);
+ if (val.size() > 1) {
+ throw new InvalidRequestException(
+ "Unable to process " + theActionName + " - Request would cause multiple resources to match URL: \"" + matchUrl + "\". Does transaction request contain duplicates?");
+ }
+ }
+ }
+ }
+
protected abstract void flushSession(Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome);
private void validateResourcePresent(IBaseResource theResource, Integer theOrder, String theVerb) {
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/TransactionHookTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/TransactionHookTest.java
new file mode 100644
index 00000000000..4a63a512a64
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/TransactionHookTest.java
@@ -0,0 +1,257 @@
+package ca.uhn.fhir.jpa.dao.r4;
+
+import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.IInterceptorService;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
+import ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts;
+import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
+import ca.uhn.test.concurrency.PointcutLatch;
+import com.google.common.collect.ListMultimap;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.DiagnosticReport;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Reference;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.List;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.matchesPattern;
+import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.DELETE;
+import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.POST;
+import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.PUT;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
+public class TransactionHookTest extends BaseJpaR4SystemTest {
+
+ @AfterEach
+ public void after() {
+ myDaoConfig.setEnforceReferentialIntegrityOnDelete(true);
+ }
+
+ PointcutLatch myPointcutLatch = new PointcutLatch(Pointcut.STORAGE_TRANSACTION_PROCESSED);
+ @Autowired
+ private IInterceptorService myInterceptorService;
+
+
+ @BeforeEach
+ public void beforeEach() {
+ myInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_TRANSACTION_PROCESSED, myPointcutLatch);
+ myInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, myPointcutLatch);
+ myInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, myPointcutLatch);
+ myInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, myPointcutLatch);
+ }
+
+ @Test
+ public void testHookShouldContainParamsForAllCreateUpdateDeleteInvocations() throws InterruptedException {
+
+ String urnReference = "urn:uuid:3bc44de3-069d-442d-829b-f3ef68cae371";
+
+ final Observation obsToDelete = new Observation();
+ obsToDelete.setStatus(Observation.ObservationStatus.FINAL);
+ DaoMethodOutcome daoMethodOutcome = myObservationDao.create(obsToDelete);
+
+ Patient pat1 = new Patient();
+
+ final Observation obs1 = new Observation();
+ obs1.setStatus(Observation.ObservationStatus.FINAL);
+ obs1.setSubject(new Reference(urnReference));
+
+ Bundle b = new Bundle();
+ Bundle.BundleEntryComponent bundleEntryComponent = b.addEntry();
+
+ bundleEntryComponent.setResource(obs1);
+ bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
+
+ Bundle.BundleEntryComponent patientComponent = b.addEntry();
+ patientComponent.setFullUrl(urnReference);
+ patientComponent.setResource(pat1);
+ patientComponent.getRequest().setMethod(POST).setUrl("Patient");
+
+
+ //Delete an observation
+ b.addEntry().getRequest().setMethod(DELETE).setUrl(daoMethodOutcome.getId().toUnqualifiedVersionless().getValue());
+
+
+ myPointcutLatch.setExpectedCount(4);
+ mySystemDao.transaction(mySrd, b);
+ List<HookParams> hookParams = myPointcutLatch.awaitExpected();
+
+ DeferredInterceptorBroadcasts broadcastsParam = hookParams.get(3).get(DeferredInterceptorBroadcasts.class);
+ ListMultimap<Pointcut, HookParams> deferredInterceptorBroadcasts = broadcastsParam.getDeferredInterceptorBroadcasts();
+ assertThat(deferredInterceptorBroadcasts.entries(), hasSize(3));
+
+ List<HookParams> createPointcutInvocations = deferredInterceptorBroadcasts.get(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED);
+ assertThat(createPointcutInvocations, hasSize(2));
+
+ IBaseResource firstCreatedResource = createPointcutInvocations.get(0).get(IBaseResource.class);
+ InterceptorInvocationTimingEnum timing = createPointcutInvocations.get(0).get(InterceptorInvocationTimingEnum.class);
+ assertTrue(firstCreatedResource instanceof Observation);
+ assertTrue(timing.equals(InterceptorInvocationTimingEnum.DEFERRED));
+
+ IBaseResource secondCreatedResource = createPointcutInvocations.get(1).get(IBaseResource.class);
+ timing = createPointcutInvocations.get(1).get(InterceptorInvocationTimingEnum.class);
+ assertTrue(secondCreatedResource instanceof Patient);
+ assertTrue(timing.equals(InterceptorInvocationTimingEnum.DEFERRED));
+
+ assertThat(deferredInterceptorBroadcasts.get(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED), hasSize(1));
+ }
+
+ @Test
+ public void testDeleteInTransactionShouldSucceedWhenReferencesAreAlsoRemoved() {
+ final Observation obs1 = new Observation();
+ obs1.setStatus(Observation.ObservationStatus.FINAL);
+ IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
+
+ final Observation obs2 = new Observation();
+ obs2.setStatus(Observation.ObservationStatus.FINAL);
+ IIdType obs2id = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
+
+ final DiagnosticReport rpt = new DiagnosticReport();
+ rpt.addResult(new Reference(obs2id));
+ IIdType rptId = myDiagnosticReportDao.create(rpt).getId().toUnqualifiedVersionless();
+
+ myObservationDao.read(obs1id);
+ myObservationDao.read(obs2id);
+ myDiagnosticReportDao.read(rptId);
+
+ Bundle b = new Bundle();
+ b.addEntry().getRequest().setMethod(DELETE).setUrl(rptId.getValue());
+ b.addEntry().getRequest().setMethod(DELETE).setUrl(obs2id.getValue());
+
+ try {
+ // transaction should succeed because the DiagnosticReport which references obs2 is also deleted
+ mySystemDao.transaction(mySrd, b);
+ } catch (ResourceVersionConflictException e) {
+ fail();
+ }
+ }
+
+
+ @Test
+ public void testDeleteWithHas_SourceModifiedToNoLongerIncludeReference() {
+
+ Observation obs1 = new Observation();
+ obs1.setStatus(Observation.ObservationStatus.FINAL);
+ IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
+
+ Observation obs2 = new Observation();
+ obs2.setStatus(Observation.ObservationStatus.FINAL);
+ IIdType obs2id = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
+
+ DiagnosticReport rpt = new DiagnosticReport();
+ rpt.addIdentifier().setSystem("foo").setValue("IDENTIFIER");
+ rpt.addResult(new Reference(obs2id));
+ IIdType rptId = myDiagnosticReportDao.create(rpt).getId().toUnqualifiedVersionless();
+
+ myObservationDao.read(obs1id);
+ myObservationDao.read(obs2id);
+
+ rpt = new DiagnosticReport();
+ rpt.addIdentifier().setSystem("foo").setValue("IDENTIFIER");
+
+ Bundle b = new Bundle();
+ b.addEntry().getRequest().setMethod(DELETE).setUrl("Observation?_has:DiagnosticReport:result:identifier=foo|IDENTIFIER");
+ b.addEntry().setResource(rpt).getRequest().setMethod(PUT).setUrl("DiagnosticReport?identifier=foo|IDENTIFIER");
+ mySystemDao.transaction(mySrd, b);
+
+ myObservationDao.read(obs1id);
+ try {
+ myObservationDao.read(obs2id);
+ fail();
+ } catch (ResourceGoneException e) {
+ // good
+ }
+
+ rpt = myDiagnosticReportDao.read(rptId);
+ assertThat(rpt.getResult(), empty());
+ }
+
+ @Test
+ public void testDeleteWithId_SourceModifiedToNoLongerIncludeReference() {
+
+ Observation obs1 = new Observation();
+ obs1.setStatus(Observation.ObservationStatus.FINAL);
+ IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
+
+ Observation obs2 = new Observation();
+ obs2.setStatus(Observation.ObservationStatus.FINAL);
+ IIdType obs2id = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
+
+ DiagnosticReport rpt = new DiagnosticReport();
+ rpt.addResult(new Reference(obs1id));
+ IIdType rptId = myDiagnosticReportDao.create(rpt).getId().toUnqualifiedVersionless();
+
+ myObservationDao.read(obs1id);
+ myObservationDao.read(obs2id);
+
+ rpt = new DiagnosticReport();
+ rpt.addResult(new Reference(obs2id));
+
+ Bundle b = new Bundle();
+ b.addEntry().getRequest().setMethod(DELETE).setUrl(obs1id.getValue());
+ b.addEntry().setResource(rpt).getRequest().setMethod(PUT).setUrl(rptId.getValue());
+ mySystemDao.transaction(mySrd, b);
+
+ myObservationDao.read(obs2id);
+ myDiagnosticReportDao.read(rptId);
+ try {
+ myObservationDao.read(obs1id);
+ fail();
+ } catch (ResourceGoneException e) {
+ // good
+ }
+
+ }
+
+
+ @Test
+ public void testDeleteWithHas_SourceModifiedToStillIncludeReference() {
+
+ Observation obs1 = new Observation();
+ obs1.setStatus(Observation.ObservationStatus.FINAL);
+ IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
+
+ Observation obs2 = new Observation();
+ obs2.setStatus(Observation.ObservationStatus.FINAL);
+ IIdType obs2id = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
+
+ DiagnosticReport rpt = new DiagnosticReport();
+ rpt.addIdentifier().setSystem("foo").setValue("IDENTIFIER");
+ rpt.addResult(new Reference(obs2id));
+ IIdType rptId = myDiagnosticReportDao.create(rpt).getId().toUnqualifiedVersionless();
+
+ myObservationDao.read(obs1id);
+ myObservationDao.read(obs2id);
+
+ rpt = new DiagnosticReport();
+ rpt.addIdentifier().setSystem("foo").setValue("IDENTIFIER");
+ rpt.addResult(new Reference(obs2id));
+
+ Bundle b = new Bundle();
+ b.addEntry().getRequest().setMethod(DELETE).setUrl("Observation?_has:DiagnosticReport:result:identifier=foo|IDENTIFIER");
+ b.addEntry().setResource(rpt).getRequest().setMethod(PUT).setUrl("DiagnosticReport?identifier=foo|IDENTIFIER");
+ try {
+ mySystemDao.transaction(mySrd, b);
+ fail();
+ } catch (ResourceVersionConflictException e ) {
+ assertThat(e.getMessage(), matchesPattern("Unable to delete Observation/[0-9]+ because at least one resource has a reference to this resource. First reference found was resource DiagnosticReport/[0-9]+ in path DiagnosticReport.result"));
+ }
+
+ myObservationDao.read(obs1id);
+ myObservationDao.read(obs2id);
+ myDiagnosticReportDao.read(rptId);
+ }
+}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/DeferredInterceptorBroadcasts.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/DeferredInterceptorBroadcasts.java
new file mode 100644
index 00000000000..beb54f96137
--- /dev/null
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/DeferredInterceptorBroadcasts.java
@@ -0,0 +1,38 @@
+package ca.uhn.fhir.rest.api.server.storage;
+
+/*-
+ * #%L
+ * HAPI FHIR - Server Framework
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import com.google.common.collect.ListMultimap;
+
+public class DeferredInterceptorBroadcasts {
+
+ ListMultimap<Pointcut, HookParams> myDeferredInterceptorBroadcasts;
+
+ public DeferredInterceptorBroadcasts(ListMultimap<Pointcut, HookParams> theDeferredInterceptorBroadcasts) {
+ myDeferredInterceptorBroadcasts = theDeferredInterceptorBroadcasts;
+ }
+
+ public ListMultimap<Pointcut, HookParams> getDeferredInterceptorBroadcasts() {
+ return myDeferredInterceptorBroadcasts;
+ }
+}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java
index 57520bc2570..80af36bfbfe 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.rest.api.server.storage;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import org.apache.commons.lang3.Validate;
@@ -32,6 +33,7 @@ import java.util.Collections;
import java.util.Date;
import java.util.EnumSet;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
@@ -53,6 +55,7 @@ public class TransactionDetails {
private Map<String, Object> myUserData;
private ListMultimap<Pointcut, HookParams> myDeferredInterceptorBroadcasts;
private EnumSet<Pointcut> myDeferredInterceptorBroadcastPointcuts;
+ private boolean myIsPointcutDeferred;
/**
* Constructor
@@ -189,7 +192,20 @@ public class TransactionDetails {
*/
public void addDeferredInterceptorBroadcast(Pointcut thePointcut, HookParams theHookParams) {
Validate.isTrue(isAcceptingDeferredInterceptorBroadcasts(thePointcut));
+ myIsPointcutDeferred = true;
myDeferredInterceptorBroadcasts.put(thePointcut, theHookParams);
}
+
+ public InterceptorInvocationTimingEnum getInvocationTiming(Pointcut thePointcut) {
+ if (myDeferredInterceptorBroadcasts == null) {
+ return InterceptorInvocationTimingEnum.ACTIVE;
+ }
+ List<HookParams> hookParams = myDeferredInterceptorBroadcasts.get(thePointcut);
+ return hookParams == null ? InterceptorInvocationTimingEnum.ACTIVE : InterceptorInvocationTimingEnum.DEFERRED;
+ }
+
+ public void deferredBroadcastProcessingFinished() {
+ myIsPointcutDeferred = false;
+ }
}
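To illustrate the timing bookkeeping above (a sketch only; it assumes the transaction has already been configured elsewhere to accept deferred broadcasts for the pointcut, which is done by the transaction processor and is not shown in this file):

import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;

public class ExampleInvocationTiming { // hypothetical example class

	public InterceptorInvocationTimingEnum queueOrFire(TransactionDetails theDetails, HookParams theParams) {
		// While a transaction is accepting deferred broadcasts, hook invocations are queued
		// on the TransactionDetails and replayed after the bundle has been processed
		if (theDetails.isAcceptingDeferredInterceptorBroadcasts(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)) {
			theDetails.addDeferredInterceptorBroadcast(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, theParams);
		}
		// ACTIVE outside a deferring transaction; DEFERRED while this transaction is deferring broadcasts
		return theDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED);
	}
}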
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceMessage.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceMessage.java
index f1ce661c719..dd7f266afb2 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceMessage.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceMessage.java
@@ -160,6 +160,7 @@ public abstract class BaseResourceMessage implements IResourceMessage, IModelJso
CREATE,
UPDATE,
DELETE,
- MANUALLY_TRIGGERED
+ MANUALLY_TRIGGERED,
+ TRANSACTION
}
}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/HashMapResourceProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/HashMapResourceProvider.java
index 23223aa09df..34e5da5c78a 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/HashMapResourceProvider.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/HashMapResourceProvider.java
@@ -39,6 +39,7 @@ import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.ResourceParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.annotation.Update;
+import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
@@ -387,25 +388,42 @@ public class HashMapResourceProvider implements IResour
if (!myIdToHistory.containsKey(theIdPart)) {
// Interceptor call: STORAGE_PRESTORAGE_RESOURCE_CREATED
- HookParams params = new HookParams()
+ HookParams preStorageParams = new HookParams()
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(IBaseResource.class, theResource)
.add(TransactionDetails.class, theTransactionDetails);
- interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, params);
- interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, params);
+ interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, preStorageParams);
+
+ // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_CREATED
+ HookParams preCommitParams = new HookParams()
+ .add(RequestDetails.class, theRequestDetails)
+ .addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
+ .add(IBaseResource.class, theResource)
+ .add(TransactionDetails.class, theTransactionDetails)
+ .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED));
+ interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, preCommitParams);
} else {
// Interceptor call: STORAGE_PRESTORAGE_RESOURCE_UPDATED
- HookParams params = new HookParams()
+ HookParams preStorageParams = new HookParams()
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst())
.add(IBaseResource.class, theResource)
.add(TransactionDetails.class, theTransactionDetails);
- interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, params);
- interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);
+ interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, preStorageParams);
+
+ // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
+ HookParams preCommitParams = new HookParams()
+ .add(RequestDetails.class, theRequestDetails)
+ .addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
+ .add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst())
+ .add(IBaseResource.class, theResource)
+ .add(TransactionDetails.class, theTransactionDetails)
+ .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED));
+ interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, preCommitParams);
}
}
diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java
index b27b77aec2c..2fe10217652 100644
--- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java
+++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java
@@ -1,17 +1,35 @@
package ca.uhn.fhir.util.bundle;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.r4.model.Bundle;
+import org.hl7.fhir.r4.model.ExplanationOfBenefit;
+import org.hl7.fhir.r4.model.Medication;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Quantity;
+import org.hl7.fhir.r4.model.Reference;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Test;
+import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.is;
+import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.DELETE;
+import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.GET;
+import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.POST;
+import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.PUT;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
public class BundleUtilTest {
@@ -87,6 +105,224 @@ public class BundleUtilTest {
assertEquals("Observation?foo=bar", bundle.getEntryFirstRep().getRequest().getUrl());
}
+ @Test
+ public void testTopologicalTransactionSortForCreates() {
+
+ Bundle b = new Bundle();
+ Bundle.BundleEntryComponent bundleEntryComponent = b.addEntry();
+ final Observation obs1 = new Observation();
+ obs1.setStatus(Observation.ObservationStatus.FINAL);
+ obs1.setSubject(new Reference("Patient/P1"));
+ obs1.setValue(new Quantity(4));
+ obs1.setId("Observation/O1");
+ bundleEntryComponent.setResource(obs1);
+ bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
+
+ bundleEntryComponent = b.addEntry();
+ final Observation obs2 = new Observation();
+ obs2.setStatus(Observation.ObservationStatus.FINAL);
+ obs2.setValue(new Quantity(4));
+ obs2.setId("Observation/O2");
+ bundleEntryComponent.setResource(obs2);
+ bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
+
+ Bundle.BundleEntryComponent patientComponent = b.addEntry();
+ Patient pat1 = new Patient();
+ pat1.setId("Patient/P1");
+ pat1.setManagingOrganization(new Reference("Organization/Org1"));
+ patientComponent.setResource(pat1);
+ patientComponent.getRequest().setMethod(POST).setUrl("Patient");
+
+ Bundle.BundleEntryComponent organizationComponent = b.addEntry();
+ Organization org1 = new Organization();
+ org1.setId("Organization/Org1");
+ organizationComponent.setResource(org1);
+		organizationComponent.getRequest().setMethod(POST).setUrl("Organization");
+
+ BundleUtil.sortEntriesIntoProcessingOrder(ourCtx, b);
+
+ assertThat(b.getEntry(), hasSize(4));
+
+		List<Bundle.BundleEntryComponent> entry = b.getEntry();
+ int observationIndex = getIndexOfEntryWithId("Observation/O1", b);
+ int patientIndex = getIndexOfEntryWithId("Patient/P1", b);
+ int organizationIndex = getIndexOfEntryWithId("Organization/Org1", b);
+
+ assertTrue(organizationIndex < patientIndex);
+ assertTrue(patientIndex < observationIndex);
+ }
+
+ @Test
+ public void testTransactionSorterFailsOnCyclicReference() {
+ Bundle b = new Bundle();
+ Bundle.BundleEntryComponent bundleEntryComponent = b.addEntry();
+ final Observation obs1 = new Observation();
+ obs1.setStatus(Observation.ObservationStatus.FINAL);
+ obs1.setSubject(new Reference("Patient/P1"));
+ obs1.setValue(new Quantity(4));
+ obs1.setId("Observation/O1/_history/1");
+ obs1.setHasMember(Collections.singletonList(new Reference("Observation/O2")));
+ bundleEntryComponent.setResource(obs1);
+ bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
+
+ bundleEntryComponent = b.addEntry();
+ final Observation obs2 = new Observation();
+ obs2.setStatus(Observation.ObservationStatus.FINAL);
+ obs2.setValue(new Quantity(4));
+ obs2.setId("Observation/O2/_history/1");
+		//We deliberately use a different history version here to ensure cycles are detected regardless of resource version.
+ obs2.setHasMember(Collections.singletonList(new Reference("Observation/O1/_history/300")));
+ bundleEntryComponent.setResource(obs2);
+ bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
+ try {
+ BundleUtil.sortEntriesIntoProcessingOrder(ourCtx, b);
+ fail();
+		} catch (IllegalStateException e) {
+			// Expected: the cyclic reference between the two Observations cannot be topologically sorted.
+ }
+ }
+
+ @Test
+ public void testTransactionSortingReturnsOperationsInCorrectOrder() {
+
+ Bundle b = new Bundle();
+
+ //UPDATE patient
+		Bundle.BundleEntryComponent patientUpdateComponent = b.addEntry();
+ final Patient p2 = new Patient();
+ p2.setId("Patient/P2");
+ p2.getNameFirstRep().setFamily("Test!");
+ patientUpdateComponent.setResource(p2);
+ patientUpdateComponent.getRequest().setMethod(PUT).setUrl("Patient/P2");
+
+ //CREATE observation
+ Bundle.BundleEntryComponent bundleEntryComponent = b.addEntry();
+ final Observation obs1 = new Observation();
+ obs1.setStatus(Observation.ObservationStatus.FINAL);
+ obs1.setSubject(new Reference("Patient/P1"));
+ obs1.setValue(new Quantity(4));
+ obs1.setId("Observation/O1");
+ bundleEntryComponent.setResource(obs1);
+ bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
+
+ //DELETE medication
+		Bundle.BundleEntryComponent medicationComponent = b.addEntry();
+ final Medication med1 = new Medication();
+ med1.setId("Medication/M1");
+ medicationComponent.setResource(med1);
+ medicationComponent.getRequest().setMethod(DELETE).setUrl("Medication");
+
+ //GET medication
+ Bundle.BundleEntryComponent searchComponent = b.addEntry();
+ searchComponent.getRequest().setMethod(GET).setUrl("Medication?code=123");
+
+ //CREATE patient
+ Bundle.BundleEntryComponent patientComponent = b.addEntry();
+ Patient pat1 = new Patient();
+ pat1.setId("Patient/P1");
+ pat1.setManagingOrganization(new Reference("Organization/Org1"));
+ patientComponent.setResource(pat1);
+ patientComponent.getRequest().setMethod(POST).setUrl("Patient");
+
+ //CREATE organization
+ Bundle.BundleEntryComponent organizationComponent = b.addEntry();
+ Organization org1 = new Organization();
+ org1.setId("Organization/Org1");
+ organizationComponent.setResource(org1);
+ organizationComponent.getRequest().setMethod(POST).setUrl("Organization");
+
+ //DELETE ExplanationOfBenefit
+		Bundle.BundleEntryComponent explanationOfBenefitComponent = b.addEntry();
+ final ExplanationOfBenefit eob1 = new ExplanationOfBenefit();
+ eob1.setId("ExplanationOfBenefit/E1");
+ explanationOfBenefitComponent.setResource(eob1);
+ explanationOfBenefitComponent.getRequest().setMethod(DELETE).setUrl("ExplanationOfBenefit");
+
+ BundleUtil.sortEntriesIntoProcessingOrder(ourCtx, b);
+
+ assertThat(b.getEntry(), hasSize(7));
+
+		List<Bundle.BundleEntryComponent> entry = b.getEntry();
+
+ // DELETEs first
+ assertThat(entry.get(0).getRequest().getMethod(), is(equalTo(DELETE)));
+ assertThat(entry.get(1).getRequest().getMethod(), is(equalTo(DELETE)));
+ // Then POSTs
+ assertThat(entry.get(2).getRequest().getMethod(), is(equalTo(POST)));
+ assertThat(entry.get(3).getRequest().getMethod(), is(equalTo(POST)));
+ assertThat(entry.get(4).getRequest().getMethod(), is(equalTo(POST)));
+ // Then PUTs
+ assertThat(entry.get(5).getRequest().getMethod(), is(equalTo(PUT)));
+ // Then GETs
+ assertThat(entry.get(6).getRequest().getMethod(), is(equalTo(GET)));
+ }
+
+ @Test
+	public void testBundleSortCanHandleDeletesThatContainNoResources() {
+ Patient p = new Patient();
+ p.setId("Patient/123");
+ BundleBuilder builder = new BundleBuilder(ourCtx);
+ builder.addTransactionDeleteEntry(p);
+ BundleUtil.sortEntriesIntoProcessingOrder(ourCtx, builder.getBundle());
+ }
+
+ @Test
+ public void testTransactionSorterReturnsDeletesInCorrectProcessingOrder() {
+ Bundle b = new Bundle();
+ Bundle.BundleEntryComponent bundleEntryComponent = b.addEntry();
+ final Observation obs1 = new Observation();
+ obs1.setStatus(Observation.ObservationStatus.FINAL);
+ obs1.setSubject(new Reference("Patient/P1"));
+ obs1.setValue(new Quantity(4));
+ obs1.setId("Observation/O1");
+ bundleEntryComponent.setResource(obs1);
+ bundleEntryComponent.getRequest().setMethod(DELETE).setUrl("Observation");
+
+ bundleEntryComponent = b.addEntry();
+ final Observation obs2 = new Observation();
+ obs2.setStatus(Observation.ObservationStatus.FINAL);
+ obs2.setValue(new Quantity(4));
+ obs2.setId("Observation/O2");
+ bundleEntryComponent.setResource(obs2);
+ bundleEntryComponent.getRequest().setMethod(DELETE).setUrl("Observation");
+
+ Bundle.BundleEntryComponent patientComponent = b.addEntry();
+ Patient pat1 = new Patient();
+ pat1.setId("Patient/P1");
+ pat1.setManagingOrganization(new Reference("Organization/Org1"));
+ patientComponent.setResource(pat1);
+ patientComponent.getRequest().setMethod(DELETE).setUrl("Patient");
+
+ Bundle.BundleEntryComponent organizationComponent = b.addEntry();
+ Organization org1 = new Organization();
+ org1.setId("Organization/Org1");
+ organizationComponent.setResource(org1);
+ organizationComponent.getRequest().setMethod(DELETE).setUrl("Organization");
+
+ BundleUtil.sortEntriesIntoProcessingOrder(ourCtx, b);
+
+ assertThat(b.getEntry(), hasSize(4));
+
+ int observationIndex = getIndexOfEntryWithId("Observation/O1", b);
+ int patientIndex = getIndexOfEntryWithId("Patient/P1", b);
+ int organizationIndex = getIndexOfEntryWithId("Organization/Org1", b);
+
+ assertTrue(patientIndex < organizationIndex);
+ assertTrue(observationIndex < patientIndex);
+ }
+
+ private int getIndexOfEntryWithId(String theResourceId, Bundle theBundle) {
+		List<Bundle.BundleEntryComponent> entries = theBundle.getEntry();
+ for (int i = 0; i < entries.size(); i++) {
+ String id = entries.get(i).getResource().getIdElement().toUnqualifiedVersionless().toString();
+ if (id.equals(theResourceId)) {
+ return i;
+ }
+ }
+ fail("Didn't find resource with ID " + theResourceId);
+ return -1;
+ }
+
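
For context on what these tests exercise, a minimal usage sketch of BundleUtil.sortEntriesIntoProcessingOrder from application code; the resources and class name below are illustrative and not taken from this patch:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleUtil;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;

public class SortTransactionExample {

	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();

		Bundle transaction = new Bundle();
		transaction.setType(Bundle.BundleType.TRANSACTION);

		// A Patient that references an Organization created in the same transaction
		Patient patient = new Patient();
		patient.setId("Patient/P1");
		patient.setManagingOrganization(new Reference("Organization/Org1"));
		transaction.addEntry().setResource(patient)
			.getRequest().setMethod(Bundle.HTTPVerb.POST).setUrl("Patient");

		Organization org = new Organization();
		org.setId("Organization/Org1");
		transaction.addEntry().setResource(org)
			.getRequest().setMethod(Bundle.HTTPVerb.POST).setUrl("Organization");

		// Reorders the entries in place: DELETEs first, then POSTs in dependency order
		// (the Organization now precedes the Patient that references it), then PUTs, then GETs.
		// Throws IllegalStateException if the references form a cycle.
		BundleUtil.sortEntriesIntoProcessingOrder(ctx, transaction);
	}
}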
@AfterAll
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();