Merge branch 'master' into 2566_update_s13n_and_validation_handling

commit 21d8cdfefc
@@ -10,6 +10,8 @@ import java.util.concurrent.ConcurrentHashMap;

import static org.apache.commons.lang3.StringUtils.*;

/*
 * #%L
 * HAPI FHIR - Core Library
@ -1420,6 +1420,12 @@ public enum Pointcut implements IPointcut {
|
|||
* <li>
|
||||
* ca.uhn.fhir.rest.api.server.storage.TransactionDetails - The outer transaction details object (since 5.0.0)
|
||||
* </li>
|
||||
* <li>
|
||||
* Boolean - Whether this pointcut invocation was deferred or not(since 5.4.0)
|
||||
* </li>
|
||||
* <li>
|
||||
* ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum - The timing at which the invocation of the interceptor took place. Options are ACTIVE and DEFERRED.
|
||||
* </li>
|
||||
* </ul>
|
||||
* <p>
|
||||
* Hooks should return <code>void</code>.
|
||||
|
@ -1429,7 +1435,8 @@ public enum Pointcut implements IPointcut {
|
|||
"org.hl7.fhir.instance.model.api.IBaseResource",
|
||||
"ca.uhn.fhir.rest.api.server.RequestDetails",
|
||||
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
|
||||
"ca.uhn.fhir.rest.api.server.storage.TransactionDetails"
|
||||
"ca.uhn.fhir.rest.api.server.storage.TransactionDetails",
|
||||
"ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum"
|
||||
),
|
||||
|
||||
/**
|
||||
|
@ -1446,7 +1453,7 @@ public enum Pointcut implements IPointcut {
|
|||
* Hooks may accept the following parameters:
|
||||
* <ul>
|
||||
* <li>org.hl7.fhir.instance.model.api.IBaseResource - The previous contents of the resource</li>
|
||||
* <li>org.hl7.fhir.instance.model.api.IBaseResource - The proposed new new contents of the resource</li>
|
||||
* <li>org.hl7.fhir.instance.model.api.IBaseResource - The proposed new contents of the resource</li>
|
||||
* <li>
|
||||
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
|
||||
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
|
||||
|
@ -1463,6 +1470,9 @@ public enum Pointcut implements IPointcut {
|
|||
* <li>
|
||||
* ca.uhn.fhir.rest.api.server.storage.TransactionDetails - The outer transaction details object (since 5.0.0)
|
||||
* </li>
|
||||
* <li>
|
||||
* ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum - The timing at which the invocation of the interceptor took place. Options are ACTIVE and DEFERRED.
|
||||
* </li>
|
||||
* </ul>
|
||||
* <p>
|
||||
* Hooks should return <code>void</code>.
|
||||
|
@ -1473,7 +1483,8 @@ public enum Pointcut implements IPointcut {
|
|||
"org.hl7.fhir.instance.model.api.IBaseResource",
|
||||
"ca.uhn.fhir.rest.api.server.RequestDetails",
|
||||
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
|
||||
"ca.uhn.fhir.rest.api.server.storage.TransactionDetails"
|
||||
"ca.uhn.fhir.rest.api.server.storage.TransactionDetails",
|
||||
"ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum"
|
||||
),
|
||||
|
||||
|
||||
|
@ -1503,6 +1514,9 @@ public enum Pointcut implements IPointcut {
|
|||
* <li>
|
||||
* ca.uhn.fhir.rest.api.server.storage.TransactionDetails - The outer transaction details object (since 5.0.0)
|
||||
* </li>
|
||||
* <li>
|
||||
* ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum - The timing at which the invocation of the interceptor took place. Options are ACTIVE and DEFERRED.
|
||||
* </li>
|
||||
* </ul>
|
||||
* <p>
|
||||
* Hooks should return <code>void</code>.
|
||||
|
@ -1512,9 +1526,53 @@ public enum Pointcut implements IPointcut {
|
|||
"org.hl7.fhir.instance.model.api.IBaseResource",
|
||||
"ca.uhn.fhir.rest.api.server.RequestDetails",
|
||||
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
|
||||
"ca.uhn.fhir.rest.api.server.storage.TransactionDetails",
|
||||
"ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum"
|
||||
),
|
||||
|
||||
/**
|
||||
* <b>Storage Hook:</b>
|
||||
* Invoked after all entries in a transaction bundle have been executed
|
||||
* <p>
|
||||
* Hooks will have access to the original bundle, as well as all the deferred interceptor broadcasts related to the
|
||||
* processing of the transaction bundle
|
||||
* </p>
|
||||
* Hooks may accept the following parameters:
|
||||
* <ul>
|
||||
* <li>org.hl7.fhir.instance.model.api.IBaseResource - The resource being deleted</li>
|
||||
* <li>
|
||||
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
|
||||
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
|
||||
* pulled out of the servlet request. Note that the bean
|
||||
* properties are not all guaranteed to be populated, depending on how early during processing the
|
||||
* exception occurred.
|
||||
* </li>
|
||||
* <li>
|
||||
* ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
|
||||
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
|
||||
* pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
|
||||
* only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
|
||||
* </li>
|
||||
* <li>
|
||||
* ca.uhn.fhir.rest.api.server.storage.TransactionDetails - The outer transaction details object (since 5.0.0)
|
||||
* </li>
|
||||
* <li>
|
||||
* ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts- A collection of pointcut invocations and their parameters which were deferred.
|
||||
* </li>
|
||||
* </ul>
|
||||
* <p>
|
||||
* Hooks should return <code>void</code>.
|
||||
* </p>
|
||||
*/
|
||||
STORAGE_TRANSACTION_PROCESSED(void.class,
|
||||
"org.hl7.fhir.instance.model.api.IBaseBundle",
|
||||
"ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts",
|
||||
"ca.uhn.fhir.rest.api.server.RequestDetails",
|
||||
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
|
||||
"ca.uhn.fhir.rest.api.server.storage.TransactionDetails"
|
||||
),
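For context, a minimal sketch of an interceptor consuming the new STORAGE_TRANSACTION_PROCESSED pointcut declared above. It is illustrative only and not part of this commit: the class, method and logger names are hypothetical, and it assumes (as HAPI hook methods generally may) that a hook can declare a subset of the pointcut's legal parameters.

	import ca.uhn.fhir.interceptor.api.Hook;
	import ca.uhn.fhir.interceptor.api.HookParams;
	import ca.uhn.fhir.interceptor.api.Interceptor;
	import ca.uhn.fhir.interceptor.api.Pointcut;
	import ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts;
	import org.hl7.fhir.instance.model.api.IBaseBundle;
	import java.util.Map;

	@Interceptor
	public class TransactionProcessedLogger {
		private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TransactionProcessedLogger.class);

		@Hook(Pointcut.STORAGE_TRANSACTION_PROCESSED)
		public void transactionProcessed(IBaseBundle theBundle, DeferredInterceptorBroadcasts theDeferredBroadcasts) {
			// Each entry pairs a pointcut that was deferred while the transaction bundle was being
			// processed with the HookParams it was deferred with (e.g. one STORAGE_PRECOMMIT_RESOURCE_CREATED
			// entry per resource created by the transaction).
			for (Map.Entry<Pointcut, HookParams> entry : theDeferredBroadcasts.getDeferredInterceptorBroadcasts().entries()) {
				ourLog.info("Deferred invocation of {}", entry.getKey());
			}
		}
	}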
|
||||
|
||||
|
||||
/**
|
||||
* <b>Storage Hook:</b>
|
||||
* Invoked when a resource delete operation is about to fail due to referential integrity checks. Intended for use with {@literal ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor}.
|
||||
|
|
|
@@ -0,0 +1,30 @@
package ca.uhn.fhir.rest.api;

/*-
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

/**
 * during invocation of certain pointcuts, it is important to know whether they are being executed in
 * active or deferred fashion. This enum allows the pointcuts to see how they were invoked.
 */
public enum InterceptorInvocationTimingEnum {
	ACTIVE,
	DEFERRED
}
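To show how a storage interceptor might use the new enum, here is a brief sketch (illustrative only, not part of this commit; the class and method names are hypothetical). A hook on STORAGE_PRECOMMIT_RESOURCE_CREATED can now ask whether it is running immediately (ACTIVE) or being replayed after transaction processing (DEFERRED):

	import ca.uhn.fhir.interceptor.api.Hook;
	import ca.uhn.fhir.interceptor.api.Interceptor;
	import ca.uhn.fhir.interceptor.api.Pointcut;
	import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
	import org.hl7.fhir.instance.model.api.IBaseResource;

	@Interceptor
	public class CreationAuditInterceptor {

		@Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)
		public void resourceCreated(IBaseResource theResource, InterceptorInvocationTimingEnum theTiming) {
			if (theTiming == InterceptorInvocationTimingEnum.DEFERRED) {
				// The precommit broadcast was queued during a transaction and replayed afterwards
			} else {
				// ACTIVE: the hook is firing as part of the original storage operation
			}
		}
	}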
@ -403,7 +403,6 @@ public class BundleBuilder {
|
|||
public void conditional(String theConditionalUrl) {
|
||||
myUrl.setValueAsString(theConditionalUrl);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public class CreateBuilder {
|
||||
|
|
|
@ -1,6 +1,10 @@
|
|||
package ca.uhn.fhir.util;
|
||||
|
||||
import ca.uhn.fhir.context.*;
|
||||
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
|
||||
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
|
||||
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||
import ca.uhn.fhir.rest.api.PatchTypeEnum;
|
||||
import ca.uhn.fhir.rest.api.RequestTypeEnum;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
|
@ -9,15 +13,23 @@ import ca.uhn.fhir.util.bundle.BundleEntryParts;
|
|||
import ca.uhn.fhir.util.bundle.EntryListAccumulator;
|
||||
import ca.uhn.fhir.util.bundle.ModifiableBundleEntry;
|
||||
import org.apache.commons.lang3.tuple.Pair;
|
||||
import org.hl7.fhir.instance.model.api.*;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBinary;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBundle;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
/*
|
||||
* #%L
|
||||
* HAPI FHIR - Core Library
|
||||
|
@ -42,6 +54,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
|||
* Fetch resources from a bundle
|
||||
*/
|
||||
public class BundleUtil {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BundleUtil.class);
|
||||
|
||||
|
||||
/**
|
||||
* @return Returns <code>null</code> if the link isn't found or has no value
|
||||
|
@ -170,16 +184,200 @@ public class BundleUtil {
|
|||
return entryListAccumulator.getList();
|
||||
}
|
||||
|
||||
static int WHITE = 1;
|
||||
static int GRAY = 2;
|
||||
static int BLACK = 3;
|
||||
|
||||
/**
|
||||
* Function which will do an in-place sort of a bundles' entries, to the correct processing order, which is:
|
||||
* 1. Deletes
|
||||
* 2. Creates
|
||||
* 3. Updates
|
||||
*
|
||||
* Furthermore, within these operation types, the entries will be sorted based on the order in which they should be processed
|
||||
* e.g. if you have 2 CREATEs, one for a Patient, and one for an Observation which has this Patient as its Subject,
|
||||
* the patient will come first, then the observation.
|
||||
*
|
||||
* In cases of there being a cyclic dependency (e.g. Organization/1 is partOf Organization/2 and Organization/2 is partOf Organization/1)
|
||||
* this function will throw an IllegalStateException.
|
||||
*
|
||||
* @param theContext The FhirContext.
|
||||
* @param theBundle The {@link IBaseBundle} which contains the entries you would like sorted into processing order.
|
||||
*/
|
||||
public static void sortEntriesIntoProcessingOrder(FhirContext theContext, IBaseBundle theBundle) throws IllegalStateException {
|
||||
Map<BundleEntryParts, IBase> partsToIBaseMap = getPartsToIBaseMap(theContext, theBundle);
|
||||
LinkedHashSet<IBase> retVal = new LinkedHashSet<>();
|
||||
|
||||
//Get all deletions.
|
||||
LinkedHashSet<IBase> deleteParts = sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.DELETE, partsToIBaseMap);
|
||||
validatePartsNotNull(deleteParts);
|
||||
retVal.addAll(deleteParts);
|
||||
|
||||
//Get all Creations
|
||||
LinkedHashSet<IBase> createParts= sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.POST, partsToIBaseMap);
|
||||
validatePartsNotNull(createParts);
|
||||
retVal.addAll(createParts);
|
||||
|
||||
// Get all Updates
|
||||
LinkedHashSet<IBase> updateParts= sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.PUT, partsToIBaseMap);
|
||||
validatePartsNotNull(updateParts);
|
||||
retVal.addAll(updateParts);
|
||||
|
||||
//Once we are done adding all DELETE, POST, PUT operations, add everything else.
|
||||
//Since this is a set, it will just fail to add already-added operations.
|
||||
retVal.addAll(partsToIBaseMap.values());
|
||||
|
||||
//Blow away the entries and reset them in the right order.
|
||||
TerserUtil.clearField(theContext, "entry", theBundle);
|
||||
TerserUtil.setField(theContext, "entry", theBundle, retVal.toArray(new IBase[0]));
|
||||
}
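As a usage illustration (not part of this commit), a caller holding an R4 transaction bundle could invoke the new helper as follows, assuming ca.uhn.fhir.context.FhirContext and ca.uhn.fhir.util.BundleUtil are imported; buildTransactionBundle() is a hypothetical stand-in for whatever assembles the bundle:

	FhirContext ctx = FhirContext.forR4();
	org.hl7.fhir.r4.model.Bundle transactionBundle = buildTransactionBundle(); // hypothetical helper
	try {
		// Reorders the entries in place: DELETEs first, then POSTs, then PUTs. Within each group
		// dependencies are respected, e.g. a created Patient is placed before an Observation that
		// references it, while deletes are emitted in the reverse direction so that referencing
		// resources are removed before their targets.
		BundleUtil.sortEntriesIntoProcessingOrder(ctx, transactionBundle);
	} catch (IllegalStateException e) {
		// The entries contain a reference cycle, so no processing order exists
	}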
|
||||
|
||||
private static void validatePartsNotNull(LinkedHashSet<IBase> theDeleteParts) {
|
||||
if (theDeleteParts == null) {
|
||||
throw new IllegalStateException("This transaction contains a cycle, so it cannot be sorted.");
|
||||
}
|
||||
}
|
||||
|
||||
private static LinkedHashSet<IBase> sortEntriesOfTypeIntoProcessingOrder(FhirContext theContext, RequestTypeEnum theRequestTypeEnum, Map<BundleEntryParts, IBase> thePartsToIBaseMap) {
|
||||
SortLegality legality = new SortLegality();
|
||||
HashMap<String, Integer> color = new HashMap<>();
|
||||
HashMap<String, List<String>> adjList = new HashMap<>();
|
||||
List<String> topologicalOrder = new ArrayList<>();
|
||||
Set<BundleEntryParts> bundleEntryParts = thePartsToIBaseMap.keySet().stream().filter(part -> part.getRequestType().equals(theRequestTypeEnum)).collect(Collectors.toSet());
|
||||
HashMap<String, BundleEntryParts> resourceIdToBundleEntryMap = new HashMap<>();
|
||||
|
||||
for (BundleEntryParts bundleEntryPart : bundleEntryParts) {
|
||||
IBaseResource resource = bundleEntryPart.getResource();
|
||||
if (resource != null) {
|
||||
String resourceId = resource.getIdElement().toVersionless().toString();
|
||||
resourceIdToBundleEntryMap.put(resourceId, bundleEntryPart);
|
||||
if (resourceId == null) {
|
||||
if (bundleEntryPart.getFullUrl() != null) {
|
||||
resourceId = bundleEntryPart.getFullUrl();
|
||||
}
|
||||
}
|
||||
|
||||
color.put(resourceId, WHITE);
|
||||
}
|
||||
}
|
||||
|
||||
for (BundleEntryParts bundleEntryPart : bundleEntryParts) {
|
||||
IBaseResource resource = bundleEntryPart.getResource();
|
||||
if (resource != null) {
|
||||
String resourceId = resource.getIdElement().toVersionless().toString();
|
||||
resourceIdToBundleEntryMap.put(resourceId, bundleEntryPart);
|
||||
if (resourceId == null) {
|
||||
if (bundleEntryPart.getFullUrl() != null) {
|
||||
resourceId = bundleEntryPart.getFullUrl();
|
||||
}
|
||||
}
|
||||
List<ResourceReferenceInfo> allResourceReferences = theContext.newTerser().getAllResourceReferences(resource);
|
||||
String finalResourceId = resourceId;
|
||||
allResourceReferences
|
||||
.forEach(refInfo -> {
|
||||
String referencedResourceId = refInfo.getResourceReference().getReferenceElement().toVersionless().getValue();
|
||||
if (color.containsKey(referencedResourceId)) {
|
||||
if (!adjList.containsKey(finalResourceId)) {
|
||||
adjList.put(finalResourceId, new ArrayList<>());
|
||||
}
|
||||
adjList.get(finalResourceId).add(referencedResourceId);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (Map.Entry<String, Integer> entry:color.entrySet()) {
|
||||
if (entry.getValue() == WHITE) {
|
||||
depthFirstSearch(entry.getKey(), color, adjList, topologicalOrder, legality);
|
||||
}
|
||||
}
|
||||
|
||||
if (legality.isLegal()) {
|
||||
if (ourLog.isDebugEnabled()) {
|
||||
ourLog.debug("Topological order is: {}", String.join(",", topologicalOrder));
|
||||
}
|
||||
|
||||
LinkedHashSet<IBase> orderedEntries = new LinkedHashSet<>();
|
||||
for (int i = 0; i < topologicalOrder.size(); i++) {
|
||||
BundleEntryParts bep;
|
||||
if (theRequestTypeEnum.equals(RequestTypeEnum.DELETE)) {
|
||||
int index = topologicalOrder.size() - i - 1;
|
||||
bep = resourceIdToBundleEntryMap.get(topologicalOrder.get(index));
|
||||
} else {
|
||||
bep = resourceIdToBundleEntryMap.get(topologicalOrder.get(i));
|
||||
}
|
||||
IBase base = thePartsToIBaseMap.get(bep);
|
||||
orderedEntries.add(base);
|
||||
}
|
||||
|
||||
return orderedEntries;
|
||||
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static void depthFirstSearch(String theResourceId, HashMap<String, Integer> theResourceIdToColor, HashMap<String, List<String>> theAdjList, List<String> theTopologicalOrder, SortLegality theLegality) {
|
||||
|
||||
if (!theLegality.isLegal()) {
|
||||
ourLog.debug("Found a cycle while trying to sort bundle entries. This bundle is not sortable.");
|
||||
return;
|
||||
}
|
||||
|
||||
//We are currently recursing over this node (gray)
|
||||
theResourceIdToColor.put(theResourceId, GRAY);
|
||||
|
||||
for (String neighbourResourceId: theAdjList.getOrDefault(theResourceId, new ArrayList<>())) {
|
||||
if (theResourceIdToColor.get(neighbourResourceId) == WHITE) {
|
||||
depthFirstSearch(neighbourResourceId, theResourceIdToColor, theAdjList, theTopologicalOrder, theLegality);
|
||||
} else if (theResourceIdToColor.get(neighbourResourceId) == GRAY) {
|
||||
theLegality.setLegal(false);
|
||||
return;
|
||||
}
|
||||
}
|
||||
//Mark the node as black
|
||||
theResourceIdToColor.put(theResourceId, BLACK);
|
||||
theTopologicalOrder.add(theResourceId);
|
||||
}
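The traversal above is the standard three-colour depth-first search: WHITE marks an unvisited node, GRAY a node on the current recursion stack, and BLACK a fully processed node, so meeting a GRAY neighbour means a back edge and therefore a cycle. A small self-contained sketch of the same idea, independent of the FHIR types (illustrative only, not part of this commit; it reuses the WHITE/GRAY/BLACK constants declared above and assumes the usual java.util imports):

	static boolean hasCycle(Map<String, List<String>> theAdjacency) {
		Map<String, Integer> color = new HashMap<>();
		theAdjacency.keySet().forEach(node -> color.put(node, WHITE));
		for (String node : theAdjacency.keySet()) {
			if (color.get(node) == WHITE && visit(node, theAdjacency, color)) {
				return true;
			}
		}
		return false;
	}

	static boolean visit(String theNode, Map<String, List<String>> theAdjacency, Map<String, Integer> theColor) {
		theColor.put(theNode, GRAY);
		for (String next : theAdjacency.getOrDefault(theNode, Collections.emptyList())) {
			Integer state = theColor.getOrDefault(next, BLACK);
			if (state == GRAY || (state == WHITE && visit(next, theAdjacency, theColor))) {
				return true; // a GRAY neighbour is a back edge, i.e. a cycle
			}
		}
		theColor.put(theNode, BLACK);
		return false;
	}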
|
||||
|
||||
private static Map<BundleEntryParts, IBase> getPartsToIBaseMap(FhirContext theContext, IBaseBundle theBundle) {
|
||||
RuntimeResourceDefinition bundleDef = theContext.getResourceDefinition(theBundle);
|
||||
BaseRuntimeChildDefinition entryChildDef = bundleDef.getChildByName("entry");
|
||||
List<IBase> entries = entryChildDef.getAccessor().getValues(theBundle);
|
||||
|
||||
BaseRuntimeElementCompositeDefinition<?> entryChildContentsDef = (BaseRuntimeElementCompositeDefinition<?>) entryChildDef.getChildByName("entry");
|
||||
BaseRuntimeChildDefinition fullUrlChildDef = entryChildContentsDef.getChildByName("fullUrl");
|
||||
BaseRuntimeChildDefinition resourceChildDef = entryChildContentsDef.getChildByName("resource");
|
||||
BaseRuntimeChildDefinition requestChildDef = entryChildContentsDef.getChildByName("request");
|
||||
BaseRuntimeElementCompositeDefinition<?> requestChildContentsDef = (BaseRuntimeElementCompositeDefinition<?>) requestChildDef.getChildByName("request");
|
||||
BaseRuntimeChildDefinition requestUrlChildDef = requestChildContentsDef.getChildByName("url");
|
||||
BaseRuntimeChildDefinition requestIfNoneExistChildDef = requestChildContentsDef.getChildByName("ifNoneExist");
|
||||
BaseRuntimeChildDefinition methodChildDef = requestChildContentsDef.getChildByName("method");
|
||||
Map<BundleEntryParts, IBase> map = new HashMap<>();
|
||||
for (IBase nextEntry : entries) {
|
||||
BundleEntryParts parts = getBundleEntryParts(fullUrlChildDef, resourceChildDef, requestChildDef, requestUrlChildDef, requestIfNoneExistChildDef, methodChildDef, nextEntry);
|
||||
/*
|
||||
* All 3 might be null - That's ok because we still want to know the
|
||||
* order in the original bundle.
|
||||
*/
|
||||
map.put(parts, nextEntry);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a bundle, and a consumer, apply the consumer to each entry in the bundle.
|
||||
* @param theContext The FHIR Context
|
||||
* @param theBundle The bundle to have its entries processed.
|
||||
* @param theProcessor a {@link Consumer} which will operate on all the entries of a bundle.
|
||||
*/
|
||||
public static void processEntries(FhirContext theContext, IBaseBundle theBundle, Consumer<ModifiableBundleEntry> theProcessor) {
|
||||
RuntimeResourceDefinition bundleDef = theContext.getResourceDefinition(theBundle);
|
||||
BaseRuntimeChildDefinition entryChildDef = bundleDef.getChildByName("entry");
|
||||
List<IBase> entries = entryChildDef.getAccessor().getValues(theBundle);
|
||||
|
||||
BaseRuntimeElementCompositeDefinition<?> entryChildContentsDef = (BaseRuntimeElementCompositeDefinition<?>) entryChildDef.getChildByName("entry");
|
||||
|
||||
BaseRuntimeChildDefinition fullUrlChildDef = entryChildContentsDef.getChildByName("fullUrl");
|
||||
|
||||
BaseRuntimeChildDefinition resourceChildDef = entryChildContentsDef.getChildByName("resource");
|
||||
BaseRuntimeChildDefinition requestChildDef = entryChildContentsDef.getChildByName("request");
|
||||
BaseRuntimeElementCompositeDefinition<?> requestChildContentsDef = (BaseRuntimeElementCompositeDefinition<?>) requestChildDef.getChildByName("request");
|
||||
|
@ -188,57 +386,62 @@ public class BundleUtil {
|
|||
BaseRuntimeChildDefinition methodChildDef = requestChildContentsDef.getChildByName("method");
|
||||
|
||||
for (IBase nextEntry : entries) {
|
||||
IBaseResource resource = null;
|
||||
String url = null;
|
||||
RequestTypeEnum requestType = null;
|
||||
String conditionalUrl = null;
|
||||
String fullUrl = fullUrlChildDef
|
||||
.getAccessor()
|
||||
.getFirstValueOrNull(nextEntry)
|
||||
.map(t->((IPrimitiveType<?>)t).getValueAsString())
|
||||
.orElse(null);
|
||||
|
||||
for (IBase nextResource : resourceChildDef.getAccessor().getValues(nextEntry)) {
|
||||
resource = (IBaseResource) nextResource;
|
||||
}
|
||||
for (IBase nextRequest : requestChildDef.getAccessor().getValues(nextEntry)) {
|
||||
for (IBase nextUrl : requestUrlChildDef.getAccessor().getValues(nextRequest)) {
|
||||
url = ((IPrimitiveType<?>) nextUrl).getValueAsString();
|
||||
}
|
||||
for (IBase nextMethod : methodChildDef.getAccessor().getValues(nextRequest)) {
|
||||
String methodString = ((IPrimitiveType<?>) nextMethod).getValueAsString();
|
||||
if (isNotBlank(methodString)) {
|
||||
requestType = RequestTypeEnum.valueOf(methodString);
|
||||
}
|
||||
}
|
||||
|
||||
if (requestType != null) {
|
||||
//noinspection EnumSwitchStatementWhichMissesCases
|
||||
switch (requestType) {
|
||||
case PUT:
|
||||
conditionalUrl = url != null && url.contains("?") ? url : null;
|
||||
break;
|
||||
case POST:
|
||||
List<IBase> ifNoneExistReps = requestIfNoneExistChildDef.getAccessor().getValues(nextRequest);
|
||||
if (ifNoneExistReps.size() > 0) {
|
||||
IPrimitiveType<?> ifNoneExist = (IPrimitiveType<?>) ifNoneExistReps.get(0);
|
||||
conditionalUrl = ifNoneExist.getValueAsString();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
BundleEntryParts parts = getBundleEntryParts(fullUrlChildDef, resourceChildDef, requestChildDef, requestUrlChildDef, requestIfNoneExistChildDef, methodChildDef, nextEntry);
|
||||
/*
|
||||
* All 3 might be null - That's ok because we still want to know the
|
||||
* order in the original bundle.
|
||||
*/
|
||||
BundleEntryMutator mutator = new BundleEntryMutator(theContext, nextEntry, requestChildDef, requestChildContentsDef, entryChildContentsDef);
|
||||
ModifiableBundleEntry entry = new ModifiableBundleEntry(new BundleEntryParts(fullUrl, requestType, url, resource, conditionalUrl), mutator);
|
||||
ModifiableBundleEntry entry = new ModifiableBundleEntry(parts, mutator);
|
||||
theProcessor.accept(entry);
|
||||
}
|
||||
}
|
||||
|
||||
private static BundleEntryParts getBundleEntryParts(BaseRuntimeChildDefinition fullUrlChildDef, BaseRuntimeChildDefinition resourceChildDef, BaseRuntimeChildDefinition requestChildDef, BaseRuntimeChildDefinition requestUrlChildDef, BaseRuntimeChildDefinition requestIfNoneExistChildDef, BaseRuntimeChildDefinition methodChildDef, IBase nextEntry) {
|
||||
IBaseResource resource = null;
|
||||
String url = null;
|
||||
RequestTypeEnum requestType = null;
|
||||
String conditionalUrl = null;
|
||||
String fullUrl = fullUrlChildDef
|
||||
.getAccessor()
|
||||
.getFirstValueOrNull(nextEntry)
|
||||
.map(t->((IPrimitiveType<?>)t).getValueAsString())
|
||||
.orElse(null);
|
||||
|
||||
for (IBase nextResource : resourceChildDef.getAccessor().getValues(nextEntry)) {
|
||||
resource = (IBaseResource) nextResource;
|
||||
}
|
||||
for (IBase nextRequest : requestChildDef.getAccessor().getValues(nextEntry)) {
|
||||
for (IBase nextUrl : requestUrlChildDef.getAccessor().getValues(nextRequest)) {
|
||||
url = ((IPrimitiveType<?>) nextUrl).getValueAsString();
|
||||
}
|
||||
for (IBase nextMethod : methodChildDef.getAccessor().getValues(nextRequest)) {
|
||||
String methodString = ((IPrimitiveType<?>) nextMethod).getValueAsString();
|
||||
if (isNotBlank(methodString)) {
|
||||
requestType = RequestTypeEnum.valueOf(methodString);
|
||||
}
|
||||
}
|
||||
|
||||
if (requestType != null) {
|
||||
//noinspection EnumSwitchStatementWhichMissesCases
|
||||
switch (requestType) {
|
||||
case PUT:
|
||||
conditionalUrl = url != null && url.contains("?") ? url : null;
|
||||
break;
|
||||
case POST:
|
||||
List<IBase> ifNoneExistReps = requestIfNoneExistChildDef.getAccessor().getValues(nextRequest);
|
||||
if (ifNoneExistReps.size() > 0) {
|
||||
IPrimitiveType<?> ifNoneExist = (IPrimitiveType<?>) ifNoneExistReps.get(0);
|
||||
conditionalUrl = ifNoneExist.getValueAsString();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
BundleEntryParts parts = new BundleEntryParts(fullUrl, requestType, url, resource, conditionalUrl);
|
||||
return parts;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract all of the resources from a given bundle
|
||||
*/
|
||||
|
@ -290,4 +493,20 @@ public class BundleUtil {
|
|||
}
|
||||
return isPatch;
|
||||
}
|
||||
|
||||
private static class SortLegality {
|
||||
private boolean myIsLegal;
|
||||
|
||||
SortLegality() {
|
||||
this.myIsLegal = true;
|
||||
}
|
||||
private void setLegal(boolean theLegal) {
|
||||
myIsLegal = theLegal;
|
||||
}
|
||||
|
||||
public boolean isLegal() {
|
||||
return myIsLegal;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -58,5 +58,4 @@ public class BundleEntryParts {
|
|||
public String getUrl() {
|
||||
return myUrl;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -455,11 +455,12 @@ public class InterceptorServiceTest {
|
|||
params.add(String.class, "D");
|
||||
params.add(String.class, "E");
|
||||
params.add(String.class, "F");
|
||||
params.add(String.class, "G");
|
||||
try {
|
||||
svc.haveAppropriateParams(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals("Wrong number of params for pointcut STORAGE_PRECOMMIT_RESOURCE_UPDATED - Wanted ca.uhn.fhir.rest.api.server.RequestDetails,ca.uhn.fhir.rest.api.server.storage.TransactionDetails,ca.uhn.fhir.rest.server.servlet.ServletRequestDetails,org.hl7.fhir.instance.model.api.IBaseResource,org.hl7.fhir.instance.model.api.IBaseResource but found [String, String, String, String, String, String]", e.getMessage());
|
||||
assertEquals("Wrong number of params for pointcut STORAGE_PRECOMMIT_RESOURCE_UPDATED - Wanted ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum,ca.uhn.fhir.rest.api.server.RequestDetails,ca.uhn.fhir.rest.api.server.storage.TransactionDetails,ca.uhn.fhir.rest.server.servlet.ServletRequestDetails,org.hl7.fhir.instance.model.api.IBaseResource,org.hl7.fhir.instance.model.api.IBaseResource but found [String, String, String, String, String, String, String]", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -474,6 +475,7 @@ public class InterceptorServiceTest {
|
|||
params.add((Class) String.class, 3);
|
||||
params.add((Class) String.class, 4);
|
||||
params.add((Class) String.class, 5);
|
||||
params.add((Class) String.class, 6);
|
||||
try {
|
||||
svc.haveAppropriateParams(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);
|
||||
fail();
|
||||
@@ -0,0 +1,4 @@
---
type: fix
issue: 2526
title: "Added Operation section to Conformance Statement."
@@ -0,0 +1,4 @@
---
type: add
issue: 2534
title: "Add new pointcut `STORAGE_TRANSACTION_PROCESSED`, which fires after all operations in a transaction have executed."
@ -70,6 +70,7 @@ import ca.uhn.fhir.parser.DataFormatException;
|
|||
import ca.uhn.fhir.parser.IParser;
|
||||
import ca.uhn.fhir.parser.LenientErrorHandler;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
|
||||
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
|
||||
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
|
@ -1364,7 +1365,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
.add(IBaseResource.class, theResource)
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||
.add(TransactionDetails.class, theTransactionDetails);
|
||||
.add(TransactionDetails.class, theTransactionDetails)
|
||||
.add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED));
|
||||
doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, hookParams);
|
||||
}
|
||||
|
||||
|
|
|
@ -54,6 +54,7 @@ import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
|
|||
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
|
||||
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
|
||||
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
|
||||
|
@ -376,7 +377,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
.add(IBaseResource.class, theResource)
|
||||
.add(RequestDetails.class, theRequest)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequest)
|
||||
.add(TransactionDetails.class, theTransactionDetails);
|
||||
.add(TransactionDetails.class, theTransactionDetails)
|
||||
.add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED));
|
||||
doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, hookParams);
|
||||
}
|
||||
|
||||
|
@ -483,13 +485,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
.add(IBaseResource.class, resourceToDelete)
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||
.add(TransactionDetails.class, theTransactionDetails);
|
||||
.add(TransactionDetails.class, theTransactionDetails)
|
||||
.add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED));
|
||||
|
||||
if (theTransactionDetails.isAcceptingDeferredInterceptorBroadcasts()) {
|
||||
theTransactionDetails.addDeferredInterceptorBroadcast(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
|
||||
} else {
|
||||
doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
|
||||
}
|
||||
|
||||
doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
|
||||
|
||||
DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, resourceToDelete).setCreated(true);
|
||||
|
||||
|
@ -597,7 +597,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
.add(IBaseResource.class, resourceToDelete)
|
||||
.add(RequestDetails.class, theRequest)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequest)
|
||||
.add(TransactionDetails.class, transactionDetails);
|
||||
.add(TransactionDetails.class, transactionDetails)
|
||||
.add(InterceptorInvocationTimingEnum.class, transactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED));
|
||||
doCallHooks(transactionDetails, theRequest, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
|
||||
}
|
||||
});
|
||||
|
@ -681,16 +682,24 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
myEntityManager.merge(theEntity);
|
||||
|
||||
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
|
||||
// Interceptor call: STORAGE_PRESTORAGE_RESOURCE_UPDATED
|
||||
IBaseResource newVersion = toResource(theEntity, false);
|
||||
HookParams params = new HookParams()
|
||||
HookParams preStorageParams = new HookParams()
|
||||
.add(IBaseResource.class, oldVersion)
|
||||
.add(IBaseResource.class, newVersion)
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||
.add(TransactionDetails.class, theTransactionDetails);
|
||||
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, params);
|
||||
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);
|
||||
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, preStorageParams);
|
||||
|
||||
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
|
||||
HookParams preCommitParams = new HookParams()
|
||||
.add(IBaseResource.class, oldVersion)
|
||||
.add(IBaseResource.class, newVersion)
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||
.add(TransactionDetails.class, theTransactionDetails)
|
||||
.add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED));
|
||||
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, preCommitParams);
|
||||
|
||||
}
|
||||
|
||||
|
@ -720,14 +729,23 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
|
||||
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
|
||||
IBaseResource newVersion = toResource(theEntity, false);
|
||||
HookParams params = new HookParams()
|
||||
HookParams preStorageParams = new HookParams()
|
||||
.add(IBaseResource.class, oldVersion)
|
||||
.add(IBaseResource.class, newVersion)
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||
.add(TransactionDetails.class, theTransactionDetails);
|
||||
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, params);
|
||||
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);
|
||||
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, preStorageParams);
|
||||
|
||||
HookParams preCommitParams = new HookParams()
|
||||
.add(IBaseResource.class, oldVersion)
|
||||
.add(IBaseResource.class, newVersion)
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||
.add(TransactionDetails.class, theTransactionDetails)
|
||||
.add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED));
|
||||
|
||||
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, preCommitParams);
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -49,6 +49,7 @@ import ca.uhn.fhir.rest.api.PatchTypeEnum;
|
|||
import ca.uhn.fhir.rest.api.PreferReturnEnum;
|
||||
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import ca.uhn.fhir.rest.param.ParameterUtil;
|
||||
|
@ -1005,25 +1006,16 @@ public abstract class BaseTransactionProcessor {
|
|||
flushSession(theIdToPersistedOutcome);
|
||||
|
||||
theTransactionStopWatch.endCurrentTask();
|
||||
if (conditionalRequestUrls.size() > 0) {
|
||||
theTransactionStopWatch.startTask("Check for conflicts in conditional resources");
|
||||
}
|
||||
|
||||
|
||||
|
||||
/*
|
||||
* Double check we didn't allow any duplicates we shouldn't have
|
||||
*/
|
||||
for (Map.Entry<String, Class<? extends IBaseResource>> nextEntry : conditionalRequestUrls.entrySet()) {
|
||||
String matchUrl = nextEntry.getKey();
|
||||
Class<? extends IBaseResource> resType = nextEntry.getValue();
|
||||
if (isNotBlank(matchUrl)) {
|
||||
Set<ResourcePersistentId> val = myMatchResourceUrlService.processMatchUrl(matchUrl, resType, theRequest);
|
||||
if (val.size() > 1) {
|
||||
throw new InvalidRequestException(
|
||||
"Unable to process " + theActionName + " - Request would cause multiple resources to match URL: \"" + matchUrl + "\". Does transaction request contain duplicates?");
|
||||
}
|
||||
}
|
||||
if (conditionalRequestUrls.size() > 0) {
|
||||
theTransactionStopWatch.startTask("Check for conflicts in conditional resources");
|
||||
}
|
||||
|
||||
validateNoDuplicates(theRequest, theActionName, conditionalRequestUrls);
|
||||
theTransactionStopWatch.endCurrentTask();
|
||||
|
||||
for (IIdType next : theAllIds) {
|
||||
|
@ -1044,6 +1036,19 @@ public abstract class BaseTransactionProcessor {
|
|||
JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, nextPointcut, nextParams);
|
||||
}
|
||||
|
||||
DeferredInterceptorBroadcasts deferredInterceptorBroadcasts = new DeferredInterceptorBroadcasts(deferredBroadcastEvents);
|
||||
HookParams params = new HookParams()
|
||||
.add(RequestDetails.class, theRequest)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequest)
|
||||
.add(DeferredInterceptorBroadcasts.class, deferredInterceptorBroadcasts)
|
||||
.add(TransactionDetails.class, theTransactionDetails)
|
||||
.add(IBaseBundle.class, theResponse);
|
||||
JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_TRANSACTION_PROCESSED, params);
|
||||
|
||||
theTransactionDetails.deferredBroadcastProcessingFinished();
|
||||
|
||||
//finishedCallingDeferredInterceptorBroadcasts
|
||||
|
||||
return entriesToProcess;
|
||||
|
||||
} finally {
|
||||
|
@ -1053,6 +1058,20 @@ public abstract class BaseTransactionProcessor {
|
|||
}
|
||||
}
|
||||
|
||||
private void validateNoDuplicates(RequestDetails theRequest, String theActionName, Map<String, Class<? extends IBaseResource>> conditionalRequestUrls) {
|
||||
for (Map.Entry<String, Class<? extends IBaseResource>> nextEntry : conditionalRequestUrls.entrySet()) {
|
||||
String matchUrl = nextEntry.getKey();
|
||||
Class<? extends IBaseResource> resType = nextEntry.getValue();
|
||||
if (isNotBlank(matchUrl)) {
|
||||
Set<ResourcePersistentId> val = myMatchResourceUrlService.processMatchUrl(matchUrl, resType, theRequest);
|
||||
if (val.size() > 1) {
|
||||
throw new InvalidRequestException(
|
||||
"Unable to process " + theActionName + " - Request would cause multiple resources to match URL: \"" + matchUrl + "\". Does transaction request contain duplicates?");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract void flushSession(Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome);
|
||||
|
||||
private void validateResourcePresent(IBaseResource theResource, Integer theOrder, String theVerb) {
|
||||
|
|
|
@ -0,0 +1,257 @@
|
|||
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorService;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
||||
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
|
||||
import ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
|
||||
import ca.uhn.test.concurrency.PointcutLatch;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.DiagnosticReport;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.matchesPattern;
|
||||
import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.DELETE;
|
||||
import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.POST;
|
||||
import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.PUT;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
public class TransactionHookTest extends BaseJpaR4SystemTest {
|
||||
|
||||
@AfterEach
|
||||
public void after() {
|
||||
myDaoConfig.setEnforceReferentialIntegrityOnDelete(true);
|
||||
}
|
||||
|
||||
PointcutLatch myPointcutLatch = new PointcutLatch(Pointcut.STORAGE_TRANSACTION_PROCESSED);
|
||||
@Autowired
|
||||
private IInterceptorService myInterceptorService;
|
||||
|
||||
|
||||
@BeforeEach
|
||||
public void beforeEach() {
|
||||
myInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_TRANSACTION_PROCESSED, myPointcutLatch);
|
||||
myInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, myPointcutLatch);
|
||||
myInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, myPointcutLatch);
|
||||
myInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, myPointcutLatch);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHookShouldContainParamsForAllCreateUpdateDeleteInvocations() throws InterruptedException {
|
||||
|
||||
String urnReference = "urn:uuid:3bc44de3-069d-442d-829b-f3ef68cae371";
|
||||
|
||||
final Observation obsToDelete = new Observation();
|
||||
obsToDelete.setStatus(Observation.ObservationStatus.FINAL);
|
||||
DaoMethodOutcome daoMethodOutcome = myObservationDao.create(obsToDelete);
|
||||
|
||||
Patient pat1 = new Patient();
|
||||
|
||||
final Observation obs1 = new Observation();
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs1.setSubject(new Reference(urnReference));
|
||||
|
||||
Bundle b = new Bundle();
|
||||
Bundle.BundleEntryComponent bundleEntryComponent = b.addEntry();
|
||||
|
||||
bundleEntryComponent.setResource(obs1);
|
||||
bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
|
||||
|
||||
Bundle.BundleEntryComponent patientComponent = b.addEntry();
|
||||
patientComponent.setFullUrl(urnReference);
|
||||
patientComponent.setResource(pat1);
|
||||
patientComponent.getRequest().setMethod(POST).setUrl("Patient");
|
||||
|
||||
|
||||
//Delete an observation
|
||||
b.addEntry().getRequest().setMethod(DELETE).setUrl(daoMethodOutcome.getId().toUnqualifiedVersionless().getValue());
|
||||
|
||||
|
||||
myPointcutLatch.setExpectedCount(4);
|
||||
mySystemDao.transaction(mySrd, b);
|
||||
List<HookParams> hookParams = myPointcutLatch.awaitExpected();
|
||||
|
||||
DeferredInterceptorBroadcasts broadcastsParam = hookParams.get(3).get(DeferredInterceptorBroadcasts.class);
|
||||
ListMultimap<Pointcut, HookParams> deferredInterceptorBroadcasts = broadcastsParam.getDeferredInterceptorBroadcasts();
|
||||
assertThat(deferredInterceptorBroadcasts.entries(), hasSize(3));
|
||||
|
||||
List<HookParams> createPointcutInvocations = deferredInterceptorBroadcasts.get(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED);
|
||||
assertThat(createPointcutInvocations, hasSize(2));
|
||||
|
||||
IBaseResource firstCreatedResource = createPointcutInvocations.get(0).get(IBaseResource.class);
|
||||
InterceptorInvocationTimingEnum timing = createPointcutInvocations.get(0).get(InterceptorInvocationTimingEnum.class);
|
||||
assertTrue(firstCreatedResource instanceof Observation);
|
||||
assertTrue(timing.equals(InterceptorInvocationTimingEnum.DEFERRED));
|
||||
|
||||
IBaseResource secondCreatedResource = createPointcutInvocations.get(1).get(IBaseResource.class);
|
||||
timing = createPointcutInvocations.get(1).get(InterceptorInvocationTimingEnum.class);
|
||||
assertTrue(secondCreatedResource instanceof Patient);
|
||||
assertTrue(timing.equals(InterceptorInvocationTimingEnum.DEFERRED));
|
||||
|
||||
assertThat(deferredInterceptorBroadcasts.get(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED), hasSize(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteInTransactionShouldSucceedWhenReferencesAreAlsoRemoved() {
|
||||
final Observation obs1 = new Observation();
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
|
||||
|
||||
final Observation obs2 = new Observation();
|
||||
obs2.setStatus(Observation.ObservationStatus.FINAL);
|
||||
IIdType obs2id = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
|
||||
|
||||
final DiagnosticReport rpt = new DiagnosticReport();
|
||||
rpt.addResult(new Reference(obs2id));
|
||||
IIdType rptId = myDiagnosticReportDao.create(rpt).getId().toUnqualifiedVersionless();
|
||||
|
||||
myObservationDao.read(obs1id);
|
||||
myObservationDao.read(obs2id);
|
||||
myDiagnosticReportDao.read(rptId);
|
||||
|
||||
Bundle b = new Bundle();
|
||||
b.addEntry().getRequest().setMethod(DELETE).setUrl(rptId.getValue());
|
||||
b.addEntry().getRequest().setMethod(DELETE).setUrl(obs2id.getValue());
|
||||
|
||||
try {
|
||||
// transaction should succeed because the DiagnosticReport which references obs2 is also deleted
|
||||
mySystemDao.transaction(mySrd, b);
|
||||
} catch (ResourceVersionConflictException e) {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDeleteWithHas_SourceModifiedToNoLongerIncludeReference() {
|
||||
|
||||
Observation obs1 = new Observation();
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
|
||||
|
||||
Observation obs2 = new Observation();
|
||||
obs2.setStatus(Observation.ObservationStatus.FINAL);
|
||||
IIdType obs2id = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
|
||||
|
||||
DiagnosticReport rpt = new DiagnosticReport();
|
||||
rpt.addIdentifier().setSystem("foo").setValue("IDENTIFIER");
|
||||
rpt.addResult(new Reference(obs2id));
|
||||
IIdType rptId = myDiagnosticReportDao.create(rpt).getId().toUnqualifiedVersionless();
|
||||
|
||||
myObservationDao.read(obs1id);
|
||||
myObservationDao.read(obs2id);
|
||||
|
||||
rpt = new DiagnosticReport();
|
||||
rpt.addIdentifier().setSystem("foo").setValue("IDENTIFIER");
|
||||
|
||||
Bundle b = new Bundle();
|
||||
b.addEntry().getRequest().setMethod(DELETE).setUrl("Observation?_has:DiagnosticReport:result:identifier=foo|IDENTIFIER");
|
||||
b.addEntry().setResource(rpt).getRequest().setMethod(PUT).setUrl("DiagnosticReport?identifier=foo|IDENTIFIER");
|
||||
mySystemDao.transaction(mySrd, b);
|
||||
|
||||
myObservationDao.read(obs1id);
|
||||
try {
|
||||
myObservationDao.read(obs2id);
|
||||
fail();
|
||||
} catch (ResourceGoneException e) {
|
||||
// good
|
||||
}
|
||||
|
||||
rpt = myDiagnosticReportDao.read(rptId);
|
||||
assertThat(rpt.getResult(), empty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteWithId_SourceModifiedToNoLongerIncludeReference() {
|
||||
|
||||
Observation obs1 = new Observation();
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
|
||||
|
||||
Observation obs2 = new Observation();
|
||||
obs2.setStatus(Observation.ObservationStatus.FINAL);
|
||||
IIdType obs2id = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
|
||||
|
||||
DiagnosticReport rpt = new DiagnosticReport();
|
||||
rpt.addResult(new Reference(obs1id));
|
||||
IIdType rptId = myDiagnosticReportDao.create(rpt).getId().toUnqualifiedVersionless();
|
||||
|
||||
myObservationDao.read(obs1id);
|
||||
myObservationDao.read(obs2id);
|
||||
|
||||
rpt = new DiagnosticReport();
|
||||
rpt.addResult(new Reference(obs2id));
|
||||
|
||||
Bundle b = new Bundle();
|
||||
b.addEntry().getRequest().setMethod(DELETE).setUrl(obs1id.getValue());
|
||||
b.addEntry().setResource(rpt).getRequest().setMethod(PUT).setUrl(rptId.getValue());
|
||||
mySystemDao.transaction(mySrd, b);
|
||||
|
||||
myObservationDao.read(obs2id);
|
||||
myDiagnosticReportDao.read(rptId);
|
||||
try {
|
||||
myObservationDao.read(obs1id);
|
||||
fail();
|
||||
} catch (ResourceGoneException e) {
|
||||
// good
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDeleteWithHas_SourceModifiedToStillIncludeReference() {
|
||||
|
||||
Observation obs1 = new Observation();
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
IIdType obs1id = myObservationDao.create(obs1).getId().toUnqualifiedVersionless();
|
||||
|
||||
Observation obs2 = new Observation();
|
||||
obs2.setStatus(Observation.ObservationStatus.FINAL);
|
||||
IIdType obs2id = myObservationDao.create(obs2).getId().toUnqualifiedVersionless();
|
||||
|
||||
DiagnosticReport rpt = new DiagnosticReport();
|
||||
rpt.addIdentifier().setSystem("foo").setValue("IDENTIFIER");
|
||||
rpt.addResult(new Reference(obs2id));
|
||||
IIdType rptId = myDiagnosticReportDao.create(rpt).getId().toUnqualifiedVersionless();
|
||||
|
||||
myObservationDao.read(obs1id);
|
||||
myObservationDao.read(obs2id);
|
||||
|
||||
rpt = new DiagnosticReport();
|
||||
rpt.addIdentifier().setSystem("foo").setValue("IDENTIFIER");
|
||||
rpt.addResult(new Reference(obs2id));
|
||||
|
||||
Bundle b = new Bundle();
|
||||
b.addEntry().getRequest().setMethod(DELETE).setUrl("Observation?_has:DiagnosticReport:result:identifier=foo|IDENTIFIER");
|
||||
b.addEntry().setResource(rpt).getRequest().setMethod(PUT).setUrl("DiagnosticReport?identifier=foo|IDENTIFIER");
|
||||
try {
|
||||
mySystemDao.transaction(mySrd, b);
|
||||
fail();
|
||||
} catch (ResourceVersionConflictException e ) {
|
||||
assertThat(e.getMessage(), matchesPattern("Unable to delete Observation/[0-9]+ because at least one resource has a reference to this resource. First reference found was resource DiagnosticReport/[0-9]+ in path DiagnosticReport.result"));
|
||||
}
|
||||
|
||||
myObservationDao.read(obs1id);
|
||||
myObservationDao.read(obs2id);
|
||||
myDiagnosticReportDao.read(rptId);
|
||||
}
|
||||
}
@@ -0,0 +1,38 @@
package ca.uhn.fhir.rest.api.server.storage;

/*-
 * #%L
 * HAPI FHIR - Server Framework
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.Pointcut;
import com.google.common.collect.ListMultimap;

public class DeferredInterceptorBroadcasts {

	ListMultimap<Pointcut, HookParams> myDeferredInterceptorBroadcasts;

	public DeferredInterceptorBroadcasts(ListMultimap<Pointcut, HookParams> theDeferredInterceptorBroadcasts) {
		myDeferredInterceptorBroadcasts = theDeferredInterceptorBroadcasts;
	}

	public ListMultimap<Pointcut, HookParams> getDeferredInterceptorBroadcasts() {
		return myDeferredInterceptorBroadcasts;
	}
}
@ -22,6 +22,7 @@ package ca.uhn.fhir.rest.api.server.storage;
|
|||
|
||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
|
@ -32,6 +33,7 @@ import java.util.Collections;
|
|||
import java.util.Date;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
|
@ -53,6 +55,7 @@ public class TransactionDetails {
|
|||
private Map<String, Object> myUserData;
|
||||
private ListMultimap<Pointcut, HookParams> myDeferredInterceptorBroadcasts;
|
||||
private EnumSet<Pointcut> myDeferredInterceptorBroadcastPointcuts;
|
||||
private boolean myIsPointcutDeferred;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
|
@ -189,7 +192,20 @@ public class TransactionDetails {
|
|||
*/
|
||||
public void addDeferredInterceptorBroadcast(Pointcut thePointcut, HookParams theHookParams) {
|
||||
Validate.isTrue(isAcceptingDeferredInterceptorBroadcasts(thePointcut));
|
||||
myIsPointcutDeferred = true;
|
||||
myDeferredInterceptorBroadcasts.put(thePointcut, theHookParams);
|
||||
}
|
||||
|
||||
public InterceptorInvocationTimingEnum getInvocationTiming(Pointcut thePointcut) {
|
||||
if (myDeferredInterceptorBroadcasts == null) {
|
||||
return InterceptorInvocationTimingEnum.ACTIVE;
|
||||
}
|
||||
List<HookParams> hookParams = myDeferredInterceptorBroadcasts.get(thePointcut);
|
||||
return hookParams == null ? InterceptorInvocationTimingEnum.ACTIVE : InterceptorInvocationTimingEnum.DEFERRED;
|
||||
}
|
||||
|
||||
public void deferredBroadcastProcessingFinished() {
|
||||
myIsPointcutDeferred = false;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -160,6 +160,7 @@ public abstract class BaseResourceMessage implements IResourceMessage, IModelJso
|
|||
CREATE,
|
||||
UPDATE,
|
||||
DELETE,
|
||||
MANUALLY_TRIGGERED
|
||||
MANUALLY_TRIGGERED,
|
||||
TRANSACTION
|
||||
}
|
||||
}
|
||||
|
|
|
@ -39,6 +39,7 @@ import ca.uhn.fhir.rest.annotation.RequiredParam;
|
|||
import ca.uhn.fhir.rest.annotation.ResourceParam;
|
||||
import ca.uhn.fhir.rest.annotation.Search;
|
||||
import ca.uhn.fhir.rest.annotation.Update;
|
||||
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
|
||||
import ca.uhn.fhir.rest.api.MethodOutcome;
|
||||
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
|
||||
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
|
||||
|
@ -387,25 +388,42 @@ public class HashMapResourceProvider<T extends IBaseResource> implements IResour
|
|||
if (!myIdToHistory.containsKey(theIdPart)) {
|
||||
|
||||
// Interceptor call: STORAGE_PRESTORAGE_RESOURCE_CREATED
|
||||
HookParams params = new HookParams()
|
||||
HookParams preStorageParams = new HookParams()
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||
.add(IBaseResource.class, theResource)
|
||||
.add(TransactionDetails.class, theTransactionDetails);
|
||||
interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, params);
|
||||
interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, params);
|
||||
interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, preStorageParams);
|
||||
|
||||
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_CREATED
|
||||
HookParams preCommitParams = new HookParams()
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||
.add(IBaseResource.class, theResource)
|
||||
.add(TransactionDetails.class, theTransactionDetails)
|
||||
.add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED));
|
||||
interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, preCommitParams);
|
||||
|
||||
} else {
|
||||
|
||||
// Interceptor call: STORAGE_PRESTORAGE_RESOURCE_UPDATED
|
||||
HookParams params = new HookParams()
|
||||
HookParams preStorageParams = new HookParams()
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||
.add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst())
|
||||
.add(IBaseResource.class, theResource)
|
||||
.add(TransactionDetails.class, theTransactionDetails);
|
||||
interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, params);
|
||||
interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);
|
||||
interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, preStorageParams);
|
||||
|
||||
// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
|
||||
HookParams preCommitParams = new HookParams()
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||
.add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst())
|
||||
.add(IBaseResource.class, theResource)
|
||||
.add(TransactionDetails.class, theTransactionDetails)
|
||||
.add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED));
|
||||
interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, preCommitParams);
|
||||
|
||||
}
|
||||
}
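
The pre-storage and pre-commit parameter blocks above repeat the same RequestDetails/resource/TransactionDetails wiring. One possible way to factor that out is sketched below as an assumption rather than part of this change; the helper name is hypothetical and it relies on the imports already added to HashMapResourceProvider above.

private HookParams newStorageHookParams(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IBaseResource... theResources) {
	// Parameters shared by both the pre-storage and pre-commit pointcuts
	HookParams retVal = new HookParams()
		.add(RequestDetails.class, theRequestDetails)
		.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
	for (IBaseResource next : theResources) {
		retVal.add(IBaseResource.class, next);
	}
	return retVal.add(TransactionDetails.class, theTransactionDetails);
}

The pre-commit calls would then append .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(thePointcut)) before broadcasting, while the pre-storage calls would use the helper result as-is.
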
|
||||
|
|
|
@ -1,17 +1,35 @@
|
|||
package ca.uhn.fhir.util.bundle;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.util.BundleBuilder;
|
||||
import ca.uhn.fhir.util.BundleUtil;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.ExplanationOfBenefit;
|
||||
import org.hl7.fhir.r4.model.Medication;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Organization;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Quantity;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.DELETE;
|
||||
import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.GET;
|
||||
import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.POST;
|
||||
import static org.hl7.fhir.r4.model.Bundle.HTTPVerb.PUT;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
public class BundleUtilTest {
|
||||
|
||||
|
@ -87,6 +105,224 @@ public class BundleUtilTest {
|
|||
assertEquals("Observation?foo=bar", bundle.getEntryFirstRep().getRequest().getUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTopologicalTransactionSortForCreates() {
|
||||
|
||||
Bundle b = new Bundle();
|
||||
Bundle.BundleEntryComponent bundleEntryComponent = b.addEntry();
|
||||
final Observation obs1 = new Observation();
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs1.setSubject(new Reference("Patient/P1"));
|
||||
obs1.setValue(new Quantity(4));
|
||||
obs1.setId("Observation/O1");
|
||||
bundleEntryComponent.setResource(obs1);
|
||||
bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
|
||||
|
||||
bundleEntryComponent = b.addEntry();
|
||||
final Observation obs2 = new Observation();
|
||||
obs2.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs2.setValue(new Quantity(4));
|
||||
obs2.setId("Observation/O2");
|
||||
bundleEntryComponent.setResource(obs2);
|
||||
bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
|
||||
|
||||
Bundle.BundleEntryComponent patientComponent = b.addEntry();
|
||||
Patient pat1 = new Patient();
|
||||
pat1.setId("Patient/P1");
|
||||
pat1.setManagingOrganization(new Reference("Organization/Org1"));
|
||||
patientComponent.setResource(pat1);
|
||||
patientComponent.getRequest().setMethod(POST).setUrl("Patient");
|
||||
|
||||
Bundle.BundleEntryComponent organizationComponent = b.addEntry();
|
||||
Organization org1 = new Organization();
|
||||
org1.setId("Organization/Org1");
|
||||
organizationComponent.setResource(org1);
|
||||
organizationComponent.getRequest().setMethod(POST).setUrl("Organization");
|
||||
|
||||
BundleUtil.sortEntriesIntoProcessingOrder(ourCtx, b);
|
||||
|
||||
assertThat(b.getEntry(), hasSize(4));
|
||||
|
||||
List<Bundle.BundleEntryComponent> entry = b.getEntry();
|
||||
int observationIndex = getIndexOfEntryWithId("Observation/O1", b);
|
||||
int patientIndex = getIndexOfEntryWithId("Patient/P1", b);
|
||||
int organizationIndex = getIndexOfEntryWithId("Organization/Org1", b);
|
||||
|
||||
assertTrue(organizationIndex < patientIndex);
|
||||
assertTrue(patientIndex < observationIndex);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTransactionSorterFailsOnCyclicReference() {
|
||||
Bundle b = new Bundle();
|
||||
Bundle.BundleEntryComponent bundleEntryComponent = b.addEntry();
|
||||
final Observation obs1 = new Observation();
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs1.setSubject(new Reference("Patient/P1"));
|
||||
obs1.setValue(new Quantity(4));
|
||||
obs1.setId("Observation/O1/_history/1");
|
||||
obs1.setHasMember(Collections.singletonList(new Reference("Observation/O2")));
|
||||
bundleEntryComponent.setResource(obs1);
|
||||
bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
|
||||
|
||||
bundleEntryComponent = b.addEntry();
|
||||
final Observation obs2 = new Observation();
|
||||
obs2.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs2.setValue(new Quantity(4));
|
||||
obs2.setId("Observation/O2/_history/1");
|
||||
// We use an arbitrary history version on this reference to ensure cycles are detected irrespective of version.
|
||||
obs2.setHasMember(Collections.singletonList(new Reference("Observation/O1/_history/300")));
|
||||
bundleEntryComponent.setResource(obs2);
|
||||
bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
|
||||
try {
|
||||
BundleUtil.sortEntriesIntoProcessingOrder(ourCtx, b);
|
||||
fail();
|
||||
} catch (IllegalStateException e ) {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTransactionSortingReturnsOperationsInCorrectOrder() {
|
||||
|
||||
Bundle b = new Bundle();
|
||||
|
||||
//UPDATE patient
|
||||
Bundle.BundleEntryComponent patientUpdateComponent= b.addEntry();
|
||||
final Patient p2 = new Patient();
|
||||
p2.setId("Patient/P2");
|
||||
p2.getNameFirstRep().setFamily("Test!");
|
||||
patientUpdateComponent.setResource(p2);
|
||||
patientUpdateComponent.getRequest().setMethod(PUT).setUrl("Patient/P2");
|
||||
|
||||
//CREATE observation
|
||||
Bundle.BundleEntryComponent bundleEntryComponent = b.addEntry();
|
||||
final Observation obs1 = new Observation();
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs1.setSubject(new Reference("Patient/P1"));
|
||||
obs1.setValue(new Quantity(4));
|
||||
obs1.setId("Observation/O1");
|
||||
bundleEntryComponent.setResource(obs1);
|
||||
bundleEntryComponent.getRequest().setMethod(POST).setUrl("Observation");
|
||||
|
||||
//DELETE medication
|
||||
Bundle.BundleEntryComponent medicationComponent= b.addEntry();
|
||||
final Medication med1 = new Medication();
|
||||
med1.setId("Medication/M1");
|
||||
medicationComponent.setResource(med1);
|
||||
medicationComponent.getRequest().setMethod(DELETE).setUrl("Medication");
|
||||
|
||||
//GET medication
|
||||
Bundle.BundleEntryComponent searchComponent = b.addEntry();
|
||||
searchComponent.getRequest().setMethod(GET).setUrl("Medication?code=123");
|
||||
|
||||
//CREATE patient
|
||||
Bundle.BundleEntryComponent patientComponent = b.addEntry();
|
||||
Patient pat1 = new Patient();
|
||||
pat1.setId("Patient/P1");
|
||||
pat1.setManagingOrganization(new Reference("Organization/Org1"));
|
||||
patientComponent.setResource(pat1);
|
||||
patientComponent.getRequest().setMethod(POST).setUrl("Patient");
|
||||
|
||||
//CREATE organization
|
||||
Bundle.BundleEntryComponent organizationComponent = b.addEntry();
|
||||
Organization org1 = new Organization();
|
||||
org1.setId("Organization/Org1");
|
||||
organizationComponent.setResource(org1);
|
||||
organizationComponent.getRequest().setMethod(POST).setUrl("Organization");
|
||||
|
||||
//DELETE ExplanationOfBenefit
|
||||
Bundle.BundleEntryComponent explanationOfBenefitComponent= b.addEntry();
|
||||
final ExplanationOfBenefit eob1 = new ExplanationOfBenefit();
|
||||
eob1.setId("ExplanationOfBenefit/E1");
|
||||
explanationOfBenefitComponent.setResource(eob1);
|
||||
explanationOfBenefitComponent.getRequest().setMethod(DELETE).setUrl("ExplanationOfBenefit");
|
||||
|
||||
BundleUtil.sortEntriesIntoProcessingOrder(ourCtx, b);
|
||||
|
||||
assertThat(b.getEntry(), hasSize(7));
|
||||
|
||||
List<Bundle.BundleEntryComponent> entry = b.getEntry();
|
||||
|
||||
// DELETEs first
|
||||
assertThat(entry.get(0).getRequest().getMethod(), is(equalTo(DELETE)));
|
||||
assertThat(entry.get(1).getRequest().getMethod(), is(equalTo(DELETE)));
|
||||
// Then POSTs
|
||||
assertThat(entry.get(2).getRequest().getMethod(), is(equalTo(POST)));
|
||||
assertThat(entry.get(3).getRequest().getMethod(), is(equalTo(POST)));
|
||||
assertThat(entry.get(4).getRequest().getMethod(), is(equalTo(POST)));
|
||||
// Then PUTs
|
||||
assertThat(entry.get(5).getRequest().getMethod(), is(equalTo(PUT)));
|
||||
// Then GETs
|
||||
assertThat(entry.get(6).getRequest().getMethod(), is(equalTo(GET)));
|
||||
}
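
For readers of these tests, a small usage sketch of the API under test. The class and method names here are illustrative assumptions; the sort behaviour described in the comment is taken from the expectations asserted above.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleUtil;
import org.hl7.fhir.r4.model.Bundle;

public class TransactionPreprocessor {

	private final FhirContext myCtx = FhirContext.forR4();

	public Bundle prepare(Bundle theTransactionBundle) {
		// Reorders entries in place: DELETEs first, then POSTs in topological
		// (reference-dependency) order, then PUTs, then GETs. Throws
		// IllegalStateException if the POSTed resources reference each other cyclically.
		BundleUtil.sortEntriesIntoProcessingOrder(myCtx, theTransactionBundle);
		return theTransactionBundle;
	}
}
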
|
||||
|
||||
@Test
|
||||
public void testBundleSortsCanHandlesDeletesThatContainNoResources() {
|
||||
Patient p = new Patient();
|
||||
p.setId("Patient/123");
|
||||
BundleBuilder builder = new BundleBuilder(ourCtx);
|
||||
builder.addTransactionDeleteEntry(p);
|
||||
BundleUtil.sortEntriesIntoProcessingOrder(ourCtx, builder.getBundle());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTransactionSorterReturnsDeletesInCorrectProcessingOrder() {
|
||||
Bundle b = new Bundle();
|
||||
Bundle.BundleEntryComponent bundleEntryComponent = b.addEntry();
|
||||
final Observation obs1 = new Observation();
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs1.setSubject(new Reference("Patient/P1"));
|
||||
obs1.setValue(new Quantity(4));
|
||||
obs1.setId("Observation/O1");
|
||||
bundleEntryComponent.setResource(obs1);
|
||||
bundleEntryComponent.getRequest().setMethod(DELETE).setUrl("Observation");
|
||||
|
||||
bundleEntryComponent = b.addEntry();
|
||||
final Observation obs2 = new Observation();
|
||||
obs2.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs2.setValue(new Quantity(4));
|
||||
obs2.setId("Observation/O2");
|
||||
bundleEntryComponent.setResource(obs2);
|
||||
bundleEntryComponent.getRequest().setMethod(DELETE).setUrl("Observation");
|
||||
|
||||
Bundle.BundleEntryComponent patientComponent = b.addEntry();
|
||||
Patient pat1 = new Patient();
|
||||
pat1.setId("Patient/P1");
|
||||
pat1.setManagingOrganization(new Reference("Organization/Org1"));
|
||||
patientComponent.setResource(pat1);
|
||||
patientComponent.getRequest().setMethod(DELETE).setUrl("Patient");
|
||||
|
||||
Bundle.BundleEntryComponent organizationComponent = b.addEntry();
|
||||
Organization org1 = new Organization();
|
||||
org1.setId("Organization/Org1");
|
||||
organizationComponent.setResource(org1);
|
||||
organizationComponent.getRequest().setMethod(DELETE).setUrl("Organization");
|
||||
|
||||
BundleUtil.sortEntriesIntoProcessingOrder(ourCtx, b);
|
||||
|
||||
assertThat(b.getEntry(), hasSize(4));
|
||||
|
||||
int observationIndex = getIndexOfEntryWithId("Observation/O1", b);
|
||||
int patientIndex = getIndexOfEntryWithId("Patient/P1", b);
|
||||
int organizationIndex = getIndexOfEntryWithId("Organization/Org1", b);
|
||||
|
||||
assertTrue(patientIndex < organizationIndex);
|
||||
assertTrue(observationIndex < patientIndex);
|
||||
}
|
||||
|
||||
private int getIndexOfEntryWithId(String theResourceId, Bundle theBundle) {
|
||||
List<Bundle.BundleEntryComponent> entries = theBundle.getEntry();
|
||||
for (int i = 0; i < entries.size(); i++) {
|
||||
String id = entries.get(i).getResource().getIdElement().toUnqualifiedVersionless().toString();
|
||||
if (id.equals(theResourceId)) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
fail("Didn't find resource with ID " + theResourceId);
|
||||
return -1;
|
||||
}
|
||||
|
||||
@AfterAll
|
||||
public static void afterClassClearContext() {
|
||||
TestUtil.clearAllStaticFieldsForUnitTest();
|
||||
|
|
|
@ -284,7 +284,6 @@
|
|||
<artifactId>spring-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.jena</groupId>
|
||||
<artifactId>jena-core</artifactId>
|
||||
|
@ -295,6 +294,12 @@
|
|||
<artifactId>jena-arq</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-jpaserver-model</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
|
|
|
@ -2,6 +2,7 @@ package ca.uhn.fhir.rest.server;
|
|||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.model.api.Include;
|
||||
import ca.uhn.fhir.model.api.annotation.Description;
|
||||
import ca.uhn.fhir.model.api.annotation.ResourceDef;
|
||||
|
@ -46,7 +47,6 @@ import ca.uhn.fhir.rest.server.provider.HashMapResourceProvider;
|
|||
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
|
||||
import ca.uhn.fhir.rest.server.provider.ServerCapabilityStatementProvider;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.test.utilities.server.MockServletUtil;
|
||||
import ca.uhn.fhir.util.HapiExtensions;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
import ca.uhn.fhir.validation.FhirValidator;
|
||||
|
@ -71,6 +71,7 @@ import org.hl7.fhir.r4.model.DiagnosticReport;
|
|||
import org.hl7.fhir.r4.model.Encounter;
|
||||
import org.hl7.fhir.r4.model.Enumerations.PublicationStatus;
|
||||
import org.hl7.fhir.r4.model.Extension;
|
||||
import org.hl7.fhir.r4.model.Group;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.OperationDefinition;
|
||||
|
@ -85,14 +86,17 @@ import org.junit.jupiter.api.Test;
|
|||
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static ca.uhn.fhir.test.utilities.server.MockServletUtil.createServletConfig;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
@ -118,6 +122,19 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
private final FhirContext myCtx = FhirContext.forCached(FhirVersionEnum.R4);
|
||||
private FhirValidator myValidator;
|
||||
|
||||
private static Set<String> toStrings(Collection<? extends IPrimitiveType> theType) {
|
||||
HashSet<String> retVal = new HashSet<String>();
|
||||
for (IPrimitiveType next : theType) {
|
||||
retVal.add(next.getValueAsString());
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@AfterAll
|
||||
public static void afterClassClearContext() {
|
||||
TestUtil.clearAllStaticFieldsForUnitTest();
|
||||
}
|
||||
|
||||
@BeforeEach
|
||||
public void before() {
|
||||
myValidator = myCtx.newValidator();
|
||||
|
@ -154,7 +171,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement cs = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
List<String> formats = cs
|
||||
|
@ -172,7 +189,6 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testConditionalOperations() throws Exception {
|
||||
|
||||
|
@ -182,7 +198,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
String conf = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance);
|
||||
|
@ -214,7 +230,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
validate(conformance);
|
||||
|
@ -243,7 +259,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
};
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
OperationDefinition opDef = (OperationDefinition) sc.readOperationDefinition(new IdType("OperationDefinition/Patient-i-everything"), createRequestDetails(rs));
|
||||
validate(opDef);
|
||||
|
@ -264,7 +280,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
String conf = validate(conformance);
|
||||
|
@ -282,7 +298,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
boolean found = false;
|
||||
Collection<ResourceBinding> resourceBindings = rs.getResourceBindings();
|
||||
|
@ -314,7 +330,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
validate(conformance);
|
||||
|
@ -327,7 +343,6 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
assertNull(res.getConditionalUpdateElement().getValue());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testOperationParameterDocumentation() throws Exception {
|
||||
RestfulServer rs = new RestfulServer(myCtx);
|
||||
|
@ -337,7 +352,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
validate(conformance);
|
||||
|
@ -362,7 +377,6 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
assertEquals("2002", exampleExtensions.get(1).getValueAsPrimitive().getValueAsString());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* See #379
|
||||
*/
|
||||
|
@ -375,7 +389,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
|
||||
|
@ -431,7 +445,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
|
||||
|
@ -451,7 +465,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
validate(conformance);
|
||||
|
@ -480,7 +494,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
String conf = validate(conformance);
|
||||
|
@ -498,7 +512,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
String conf = myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance);
|
||||
|
@ -518,7 +532,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
boolean found = false;
|
||||
Collection<ResourceBinding> resourceBindings = rs.getResourceBindings();
|
||||
|
@ -545,7 +559,6 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testFormatIncludesSpecialNonMediaTypeFormats() throws ServletException {
|
||||
RestfulServer rs = new RestfulServer(myCtx);
|
||||
|
@ -554,7 +567,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
CapabilityStatement serverConformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
|
||||
List<String> formatCodes = serverConformance.getFormat().stream().map(c -> c.getCode()).collect(Collectors.toList());
|
||||
|
@ -577,7 +590,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
boolean found = false;
|
||||
Collection<ResourceBinding> resourceBindings = rs.getResourceBindings();
|
||||
|
@ -609,7 +622,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
boolean found = false;
|
||||
Collection<ResourceBinding> resourceBindings = rs.getResourceBindings();
|
||||
|
@ -649,7 +662,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
rsNoType.registerProvider(new SearchProviderWithListNoType());
|
||||
ServerCapabilityStatementProvider scNoType = new ServerCapabilityStatementProvider(rsNoType);
|
||||
rsNoType.setServerConformanceProvider(scNoType);
|
||||
rsNoType.init(MockServletUtil.createServletConfig());
|
||||
rsNoType.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) scNoType.getServerConformance(createHttpServletRequest(), createRequestDetails(rsNoType));
|
||||
conformance.setId("");
|
||||
|
@ -666,7 +679,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
rsWithType.registerProvider(new SearchProviderWithListWithType());
|
||||
ServerCapabilityStatementProvider scWithType = new ServerCapabilityStatementProvider(rsWithType);
|
||||
rsWithType.setServerConformanceProvider(scWithType);
|
||||
rsWithType.init(MockServletUtil.createServletConfig());
|
||||
rsWithType.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformanceWithType = (CapabilityStatement) scWithType.getServerConformance(createHttpServletRequest(), createRequestDetails(rsWithType));
|
||||
conformanceWithType.setId("");
|
||||
|
@ -685,7 +698,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
String conf = validate(conformance);
|
||||
|
@ -702,7 +715,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
String conf = validate(conformance);
|
||||
|
@ -737,7 +750,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
};
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement opDef = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
|
||||
|
@ -778,7 +791,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
};
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement opDef = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
|
||||
|
@ -818,7 +831,6 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
assertTrue(operationDefinition.getInstance());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testPlainProviderGlobalSystemAndInstanceOperations() throws Exception {
|
||||
|
||||
|
@ -871,7 +883,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
};
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement opDef = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
|
||||
|
@ -949,7 +961,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
};
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement opDef = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
|
||||
|
@ -1013,7 +1025,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
};
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement opDef = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
|
||||
|
@ -1034,7 +1046,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
validate(conformance);
|
||||
|
@ -1079,7 +1091,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
validate(conformance);
|
||||
|
@ -1121,7 +1133,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
|
||||
|
@ -1148,7 +1160,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance));
|
||||
|
@ -1198,7 +1210,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
sc.setRestResourceRevIncludesEnabled(true);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance));
|
||||
|
@ -1235,7 +1247,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
}
|
||||
|
||||
@Search
|
||||
public List<Observation> search(@OptionalParam(name = "subject") ReferenceParam theSubject) {
|
||||
public List<Observation> search(@OptionalParam(name = "subject") ReferenceParam theSubject) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
|
@ -1248,7 +1260,7 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
sc.setRestResourceRevIncludesEnabled(true);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
|
||||
rs.init(MockServletUtil.createServletConfig());
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
ourLog.info(myCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(conformance));
|
||||
|
@ -1260,6 +1272,27 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
assertThat(toStrings(patientResource.getSearchRevInclude()), containsInAnyOrder("Observation:subject"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBulkDataExport() throws ServletException {
|
||||
RestfulServer rs = new RestfulServer(myCtx);
|
||||
rs.setResourceProviders(new BulkDataExportProvider());
|
||||
rs.setServerAddressStrategy(new HardcodedServerAddressStrategy("http://localhost/baseR4"));
|
||||
|
||||
ServerCapabilityStatementProvider sc = new ServerCapabilityStatementProvider(rs);
|
||||
rs.setServerConformanceProvider(sc);
|
||||
rs.init(createServletConfig());
|
||||
|
||||
CapabilityStatement conformance = (CapabilityStatement) sc.getServerConformance(createHttpServletRequest(), createRequestDetails(rs));
|
||||
ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conformance));
|
||||
|
||||
List<CapabilityStatementRestResourceComponent> resources = conformance.getRestFirstRep().getResource();
|
||||
CapabilityStatementRestResourceComponent groupResource = resources.stream()
|
||||
.filter(resource -> "Group".equals(resource.getType()))
|
||||
.findFirst().get();
|
||||
|
||||
assertThat(toOperationNames(groupResource.getOperation()),containsInAnyOrder("export", "export-poll-status"));
|
||||
}
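
For context, a sketch of how a client might invoke the Group-level $export operation exposed by a provider like the one registered in this test. The fluent calls used here are believed to exist on HAPI's IGenericClient, but the base URL, group ID, and async Prefer header handling are assumptions, not part of this changeset.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;

public class GroupExportClientExample {

	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost/baseR4");

		// Kick off a Group-level bulk export; the server responds asynchronously,
		// so the interesting output is the Content-Location polling URL rather
		// than a Parameters body.
		client
			.operation()
			.onInstance(new IdType("Group", "123"))
			.named(JpaConstants.OPERATION_EXPORT)
			.withNoParameters(Parameters.class)
			.useHttpGet()
			.withAdditionalHeader("Prefer", "respond-async")
			.execute();
	}
}
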
|
||||
|
||||
private List<String> toOperationIdParts(List<CapabilityStatementRestResourceOperationComponent> theOperation) {
|
||||
ArrayList<String> retVal = Lists.newArrayList();
|
||||
for (CapabilityStatementRestResourceOperationComponent next : theOperation) {
|
||||
|
@ -1304,6 +1337,52 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
return myCtx.newXmlParser().setPrettyPrint(false).encodeResourceToString(theResource);
|
||||
}
|
||||
|
||||
public static class BulkDataExportProvider implements IResourceProvider {
|
||||
@Operation(name = JpaConstants.OPERATION_EXPORT, global = false /* set to true once we can handle this */, manualResponse = true, idempotent = true)
|
||||
public void export(
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theOutputFormat,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theType,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theTypeFilter,
|
||||
ServletRequestDetails theRequestDetails
|
||||
) {
|
||||
}
|
||||
|
||||
@Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Group")
|
||||
public void groupExport(
|
||||
@IdParam IIdType theIdParam,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theOutputFormat,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theType,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theTypeFilter,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") IPrimitiveType<Boolean> theMdm,
|
||||
ServletRequestDetails theRequestDetails
|
||||
) {
|
||||
}
|
||||
|
||||
@Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Patient")
|
||||
public void patientExport(
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theOutputFormat,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theType,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theTypeFilter,
|
||||
ServletRequestDetails theRequestDetails
|
||||
) {
|
||||
}
|
||||
|
||||
@Operation(name = JpaConstants.OPERATION_EXPORT_POLL_STATUS, manualResponse = true, idempotent = true)
|
||||
public void exportPollStatus(
|
||||
@OperationParam(name = JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, typeName = "string", min = 0, max = 1) IPrimitiveType<String> theJobId,
|
||||
ServletRequestDetails theRequestDetails
|
||||
) throws IOException {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<? extends IBaseResource> getResourceType() {
|
||||
return Group.class;
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public static class ConditionalProvider implements IResourceProvider {
|
||||
|
||||
|
@ -1400,7 +1479,6 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
public IBundleProvider validate(HttpServletRequest theServletRequest, @IdParam IdType theId, @ResourceParam Patient thePatient) {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
|
@ -1658,17 +1736,4 @@ public class ServerCapabilityStatementProviderR4Test {
|
|||
public static class PatientTripleSub extends PatientSubSub {
|
||||
}
|
||||
|
||||
private static Set<String> toStrings(Collection<? extends IPrimitiveType> theType) {
|
||||
HashSet<String> retVal = new HashSet<String>();
|
||||
for (IPrimitiveType next : theType) {
|
||||
retVal.add(next.getValueAsString());
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@AfterAll
|
||||
public static void afterClassClearContext() {
|
||||
TestUtil.clearAllStaticFieldsForUnitTest();
|
||||
}
|
||||
|
||||
}
|
||||
|
|