Enable optional cascading deletes on JPA server (#1363)

* Start working on cascading deletes
* Work on cascading deletes
* Work on cascading deletes
* Enable cascading deletes
* Cascade deletes almost working
* Fix test
* Test fix
This commit is contained in:
parent fa80223d57
commit 1fd99dac63
@@ -241,7 +241,7 @@ public enum Pointcut {
  * ca.uhn.fhir.rest.api.RestOperationTypeEnum - The type of operation that the FHIR server has determined that the client is trying to invoke
  * </li>
  * <li>
- * ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails - This parameter is provided for legacy reasons only and will be removed in the fututre. Do not use.
+ * ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails - This parameter is provided for legacy reasons only and will be removed in the future. Do not use.
  * </li>
  * </ul>
  * </p>
@@ -373,6 +373,43 @@ public enum Pointcut {
 "javax.servlet.http.HttpServletResponse"
 ),
 
+/**
+ * <b>Server Hook:</b>
+ * This method is called when an OperationOutcome is being returned in response to a failure.
+ * Hook methods may use this hook to modify the OperationOutcome being returned.
+ * <p>
+ * Hooks may accept the following parameters:
+ * <ul>
+ * <li>
+ * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+ * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+ * pulled out of the servlet request. Note that the bean
+ * properties are not all guaranteed to be populated, depending on how early during processing the
+ * exception occurred.
+ * </li>
+ * <li>
+ * ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+ * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+ * pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
+ * only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
+ * </li>
+ * <li>
+ * org.hl7.fhir.instance.model.api.IBaseOperationOutcome - The OperationOutcome resource that will be
+ * returned.
+ * </ul>
+ * <p>
+ * Hook methods must return <code>void</code>
+ * </p>
+ */
+SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME(
+	void.class,
+	"ca.uhn.fhir.rest.api.server.RequestDetails",
+	"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
+	"org.hl7.fhir.instance.model.api.IBaseOperationOutcome"
+),
+
+
 /**
  * <b>Server Hook:</b>
  * This method is called after all processing is completed for a request, but only if the
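Note (editorial addition, not part of the commit): a minimal sketch of a hook for the new SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME pointcut. The interceptor class name and issue text are hypothetical; the pointcut, its parameter types, and the OperationOutcomeUtil.addIssue(...) call are taken from this commit.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;

@Interceptor
public class FailureOutcomeDecoratingInterceptor {

	@Hook(Pointcut.SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME)
	public void addSupportHint(RequestDetails theRequestDetails, IBaseOperationOutcome theOutcome) {
		// Hooks for this pointcut must return void, so they can only modify the outcome in place
		FhirContext ctx = theRequestDetails.getFhirContext();
		OperationOutcomeUtil.addIssue(ctx, theOutcome, "information",
			"Contact support if this failure persists", null, "informational");
	}
}
```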
@@ -453,7 +490,6 @@ public enum Pointcut {
  */
 SUBSCRIPTION_RESOURCE_MATCHED(boolean.class, "ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage", "ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult"),
 
-
 /**
  * Invoked whenever a persisted resource was checked against all active subscriptions, and did not
  * match any.

@@ -506,6 +542,7 @@ public enum Pointcut {
  */
 SUBSCRIPTION_AFTER_DELIVERY(void.class, "ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage"),
 
+
 /**
  * Invoked immediately after the attempted delivery of a subscription, if the delivery
  * failed.

@@ -565,7 +602,6 @@ public enum Pointcut {
  */
 SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY(boolean.class, "ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage"),
 
-
 /**
  * Invoked whenever a persisted resource (a resource that has just been stored in the
  * database via a create/update/patch/etc.) is about to be checked for whether any subscriptions

@@ -584,6 +620,7 @@ public enum Pointcut {
  */
 SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED(boolean.class, "ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage"),
 
+
 /**
  * Invoked whenever a persisted resource (a resource that has just been stored in the
  * database via a create/update/patch/etc.) has been checked for whether any subscriptions

@@ -600,6 +637,7 @@ public enum Pointcut {
  */
 SUBSCRIPTION_AFTER_PERSISTED_RESOURCE_CHECKED(void.class, "ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage"),
 
+
 /**
  * Invoked immediately after an active subscription is "registered". In HAPI FHIR, when
  * a subscription
@@ -618,6 +656,48 @@ public enum Pointcut {
  */
 SUBSCRIPTION_AFTER_ACTIVE_SUBSCRIPTION_REGISTERED(void.class, "ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription"),
 
+
+/**
+ * Invoked when one or more resources may are about to be cascading a delete.
+ * <p>
+ * Hooks may accept the following parameters:
+ * </p>
+ * <ul>
+ * <li>
+ * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+ * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+ * pulled out of the servlet request. Note that the bean
+ * properties are not all guaranteed to be populated, depending on how early during processing the
+ * exception occurred. <b>Note that this parameter may be null in contexts where the request is not
+ * known, such as while processing searches</b>
+ * </li>
+ * <li>
+ * ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+ * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+ * pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
+ * only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
+ * </li>
+ * <li>
+ * ca.uhn.fhir.jpa.util.DeleteConflictList - Contains the details about the delete conflicts that are
+ * being resolved via deletion. The source resource is the resource that will be deleted, and
+ * is a cascade because the target resource is already being deleted.
+ * </li>
+ * <li>
+ * org.hl7.fhir.instance.model.api.IBaseResource - The actual resource that is about to be deleted via a cascading delete
+ * </li>
+ * </ul>
+ * <p>
+ * Hooks should return <code>void</code>.
+ * </p>
+ */
+STORAGE_CASCADE_DELETE(
+	void.class,
+	"ca.uhn.fhir.rest.api.server.RequestDetails",
+	"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
+	"ca.uhn.fhir.jpa.delete.DeleteConflictList",
+	"org.hl7.fhir.instance.model.api.IBaseResource"
+),
+
 /**
  * Invoked when one or more resources may be returned to the user, whether as a part of a READ,
  * a SEARCH, or even as the response to a CREATE/UPDATE, etc.
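Note (editorial addition): a hypothetical hook for the new STORAGE_CASCADE_DELETE pointcut, shown only to illustrate the parameter types listed above. The interceptor class and log message are invented; hooks for this pointcut must return void.

```java
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.delete.DeleteConflictList;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Interceptor
public class CascadeDeleteAuditInterceptor {

	private static final Logger ourLog = LoggerFactory.getLogger(CascadeDeleteAuditInterceptor.class);

	@Hook(Pointcut.STORAGE_CASCADE_DELETE)
	public void auditCascadedDelete(RequestDetails theRequestDetails, DeleteConflictList theConflicts, IBaseResource theResource) {
		// The resource passed in is about to be deleted because a resource it references is being deleted
		ourLog.info("Cascading delete is about to remove {}", theResource.getIdElement().getValue());
	}
}
```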
@@ -664,7 +744,6 @@ public enum Pointcut {
 "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
 ),
 
-
 /**
  * Invoked when the storage engine is about to check for the existence of a pre-cached search
  * whose results match the given search parameters.

@@ -702,7 +781,6 @@ public enum Pointcut {
 "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
 ),
 
-
 /**
  * Invoked when a search is starting, prior to creating a record for the search.
  * <p>

@@ -738,7 +816,6 @@ public enum Pointcut {
 "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
 ),
 
-
 /**
  * Invoked when one or more resources may be returned to the user, whether as a part of a READ,
  * a SEARCH, or even as the response to a CREATE/UPDATE, etc.

@@ -859,6 +936,7 @@ public enum Pointcut {
 "ca.uhn.fhir.rest.api.server.RequestDetails",
 "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
 ),
+
 /**
  * Invoked before a resource will be created, immediately before the resource
  * is persisted to the database.

@@ -893,6 +971,8 @@ public enum Pointcut {
 "ca.uhn.fhir.rest.api.server.RequestDetails",
 "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
 ),
+
+
 /**
  * Invoked before a resource will be created, immediately before the transaction
  * is committed (after all validation and other business rules have successfully

@@ -968,6 +1048,8 @@ public enum Pointcut {
 "ca.uhn.fhir.rest.api.server.RequestDetails",
 "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
 ),
+
+
 /**
  * Invoked before a resource will be created
  * <p>

@@ -1001,7 +1083,6 @@ public enum Pointcut {
 "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
 ),
 
-
 /**
  * Invoked when a resource delete operation is about to fail due to referential integrity conflicts.
  * <p>
@@ -1025,15 +1106,15 @@ public enum Pointcut {
  * </li>
  * </ul>
  * <p>
- * Hooks should return <code>boolean</code>. If the method returns <code>true</code> then the caller
- * will retry checking for delete conflicts. If there are still conflicts, then the hook will be invoked again,
- * repeatedly up to a maximum of {@value ca.uhn.fhir.jpa.delete.DeleteConflictService#MAX_RETRIES} retries.
- * The first time the hook is invoked, there will be a maximum of {@value ca.uhn.fhir.jpa.delete.DeleteConflictService#MIN_QUERY_RESULT_COUNT}
- * conflicts passed to the method. Subsequent hook invocations will pass a maximum of
- * {@value ca.uhn.fhir.jpa.delete.DeleteConflictService#MAX_RETRY_COUNT} conflicts to the hook.
+ * Hooks should return <code>ca.uhn.fhir.jpa.delete.DeleteConflictOutcome</code>.
+ * If the interceptor returns a non-null result, the DeleteConflictOutcome can be
+ * used to indicate a number of times to retry.
  * </p>
  */
-STORAGE_PRESTORAGE_DELETE_CONFLICTS(boolean.class,
+STORAGE_PRESTORAGE_DELETE_CONFLICTS(
+	// Return type
+	"ca.uhn.fhir.jpa.delete.DeleteConflictOutcome",
+	// Params
 "ca.uhn.fhir.jpa.delete.DeleteConflictList",
 "ca.uhn.fhir.rest.api.server.RequestDetails",
 "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
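Note (editorial addition): under the revised contract, a STORAGE_PRESTORAGE_DELETE_CONFLICTS hook returns a DeleteConflictOutcome rather than a boolean. A minimal sketch, mirroring the hook signatures used later in this commit; the class name and the conflict-resolution body are placeholders.

```java
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.delete.DeleteConflictList;
import ca.uhn.fhir.jpa.delete.DeleteConflictOutcome;
import ca.uhn.fhir.rest.api.server.RequestDetails;

@Interceptor
public class MyDeleteConflictResolver {

	@Hook(Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS)
	public DeleteConflictOutcome resolveConflicts(DeleteConflictList theConflicts, RequestDetails theRequest) {
		// ... resolve some or all of the conflicts here, e.g. by deleting the referencing resources ...

		// Returning null means "do nothing"; a non-null outcome asks DeleteConflictService to
		// re-run the conflict check up to the indicated number of times
		return new DeleteConflictOutcome().setShouldRetryCount(1);
	}
}
```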
@@ -1076,7 +1157,6 @@ public enum Pointcut {
 "ca.uhn.fhir.jpa.model.search.StorageProcessingMessage"
 ),
 
-
 /**
  * Note that this is a performance tracing hook. Use with caution in production
  * systems, since calling it may (or may not) carry a cost.

@@ -1192,6 +1272,7 @@ public enum Pointcut {
 "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails"
 ),
 
+
 /**
  * Note that this is a performance tracing hook. Use with caution in production
  * systems, since calling it may (or may not) carry a cost.

@@ -1363,8 +1444,8 @@ public enum Pointcut {
 private final Class<?> myReturnType;
 private final ExceptionHandlingSpec myExceptionHandlingSpec;
 
-Pointcut(@Nonnull Class<?> theReturnType, String... theParameterTypes) {
-	this(theReturnType, new ExceptionHandlingSpec(), theParameterTypes);
+Pointcut(@Nonnull String theReturnType, String... theParameterTypes) {
+	this(toReturnTypeClass(theReturnType), new ExceptionHandlingSpec(), theParameterTypes);
 }
 
 Pointcut(@Nonnull Class<?> theReturnType, @Nonnull ExceptionHandlingSpec theExceptionHandlingSpec, String... theParameterTypes) {

@@ -1373,6 +1454,10 @@ public enum Pointcut {
 	myParameterTypes = Collections.unmodifiableList(Arrays.asList(theParameterTypes));
 }
 
+Pointcut(@Nonnull Class<?> theReturnType, String... theParameterTypes) {
+	this(theReturnType, new ExceptionHandlingSpec(), theParameterTypes);
+}
+
 public boolean isShouldLogAndSwallowException(@Nonnull Throwable theException) {
 	for (Class<? extends Throwable> next : myExceptionHandlingSpec.myTypesToLogAndSwallow) {
 		if (next.isAssignableFrom(theException.getClass())) {

@@ -1392,6 +1477,9 @@ public enum Pointcut {
 	return myParameterTypes;
 }
 
+private class UnknownType {
+}
+
 private static class ExceptionHandlingSpec {
 
 	private final Set<Class<? extends Throwable>> myTypesToLogAndSwallow = new HashSet<>();

@@ -1403,4 +1491,12 @@ public enum Pointcut {
 
 }
 
+private static Class<?> toReturnTypeClass(String theReturnType) {
+	try {
+		return Class.forName(theReturnType);
+	} catch (ClassNotFoundException theE) {
+		return UnknownType.class;
+	}
+}
+
 }
@@ -268,14 +268,15 @@ public class InterceptorService implements IInterceptorService, IInterceptorBroa
 	 */
 	for (BaseInvoker nextInvoker : invokers) {
 		Object nextOutcome = nextInvoker.invoke(theParams);
-		if (thePointcut.getReturnType() == boolean.class) {
+		Class<?> pointcutReturnType = thePointcut.getReturnType();
+		if (pointcutReturnType.equals(boolean.class)) {
 			Boolean nextOutcomeAsBoolean = (Boolean) nextOutcome;
 			if (Boolean.FALSE.equals(nextOutcomeAsBoolean)) {
 				ourLog.trace("callHooks({}) for invoker({}) returned false", thePointcut, nextInvoker);
 				theRetVal = false;
 				break;
 			}
-		} else if (thePointcut.getReturnType() != void.class) {
+		} else if (pointcutReturnType.equals(void.class) == false) {
 			if (nextOutcome != null) {
 				theRetVal = nextOutcome;
 				break;

@@ -481,10 +482,14 @@ public class InterceptorService implements IInterceptorService, IInterceptorBroa
 	Object[] args = new Object[myParameterTypes.length];
 	for (int i = 0; i < myParameterTypes.length; i++) {
 		Class<?> nextParamType = myParameterTypes[i];
+		if (nextParamType.equals(Pointcut.class)) {
+			args[i] = myPointcut;
+		} else {
 			int nextParamIndex = myParameterIndexes[i];
 			Object nextParamValue = theParams.get(nextParamType, nextParamIndex);
 			args[i] = nextParamValue;
 		}
+	}
 
 	// Invoke the method
 	try {
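Note (editorial addition): the new branch above injects the triggering Pointcut into any hook parameter declared with type Pointcut. Whether a given pointcut accepts such a parameter at registration time is not shown in this excerpt, so the following is only an assumed illustration of the idea; the class, method, and log message are hypothetical, and the other parameter types are taken from the SUBSCRIPTION_AFTER_DELIVERY definition above.

```java
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription;
import ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Interceptor
public class PointcutAwareInterceptor {

	private static final Logger ourLog = LoggerFactory.getLogger(PointcutAwareInterceptor.class);

	@Hook(Pointcut.SUBSCRIPTION_AFTER_DELIVERY)
	public void afterDelivery(Pointcut thePointcut, CanonicalSubscription theSubscription, ResourceDeliveryMessage theMessage) {
		// thePointcut is supplied by the invoker itself rather than looked up from the hook parameter list
		ourLog.debug("Hook fired for pointcut {}", thePointcut);
	}
}
```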
@@ -207,6 +207,8 @@ public class Constants {
 	public static final String TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS = "TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS";
 	public static final String PARAM_SEARCH_TOTAL_MODE = "_total";
 	public static final String CAPABILITYSTATEMENT_WEBSOCKET_URL = "http://hl7.org/fhir/StructureDefinition/capabilitystatement-websocket";
+	public static final String PARAMETER_CASCADE_DELETE = "_cascade";
+	public static final String HEADER_CASCADE_DELETE = "X-Cascade-Delete";
 
 	static {
 		CHARSET_UTF8 = Charset.forName(CHARSET_NAME_UTF8);
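Note (editorial addition): the two new constants are what clients use to opt in to cascading deletes. A usage sketch with the HAPI generic client follows; the base URL and resource ID are placeholders, and the withAdditionalHeader(...) call mirrors the one used in the new AuthorizationInterceptor test at the end of this commit. The URL-parameter form is equivalent: DELETE [base]/Patient/123?_cascade=true.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IdType;

public class CascadeDeleteClientExample {

	public static void main(String[] args) {
		// Base URL is a placeholder; any HAPI FHIR JPA server with the CascadingDeleteInterceptor registered will do
		IGenericClient client = FhirContext.forR4().newRestfulGenericClient("http://localhost:8080/fhir");

		// Delete Patient/123 and cascade to any resources that reference it
		client
			.delete()
			.resourceById(new IdType("Patient", "123"))
			.withAdditionalHeader(Constants.HEADER_CASCADE_DELETE, "true")
			.execute();
	}
}
```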
@@ -104,17 +104,19 @@ ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.failedToExtractPa
 ca.uhn.fhir.jpa.dao.SearchBuilder.invalidQuantityPrefix=Unable to handle quantity prefix "{0}" for value: {1}
 ca.uhn.fhir.jpa.dao.SearchBuilder.invalidNumberPrefix=Unable to handle number prefix "{0}" for value: {1}
 
+ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.matchesFound=Matches found!
+ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.noMatchesFound=No matches found!
+ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.matchesFound=Matches found!
+ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.noMatchesFound=No matches found!
 ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoSearchParameterR4.invalidSearchParamExpression=The expression "{0}" can not be evaluated and may be invalid: {1}
 
+ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.successMsg=Cascaded delete to {0} resources: {1}
+ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.noParam=Note that cascading deletes are not active for this request. You can enable cascading deletes by using the "_cascade=true" URL parameter.
+
 ca.uhn.fhir.jpa.provider.BaseJpaProvider.cantCombintAtAndSince=Unable to combine _at and _since parameters for history operation
 
 ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1}
 ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemUri=Can not create multiple code systems with URI "{0}", already have one with resource ID: {1}
 ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
 
-ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.matchesFound=Matches found!
-ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.noMatchesFound=No matches found!
-ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.matchesFound=Matches found!
-ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.noMatchesFound=No matches found!
-
 ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON patch to {0}: {1}
@@ -192,6 +192,11 @@ public abstract class BaseConfig implements SchedulingConfigurer {
 		return daoRegistry().getResourceDaoIfExists(theResourceType) != null;
 	}
 
+	@Bean
+	public IConsentContextServices consentContextServices() {
+		return new JpaConsentContextServices();
+	}
+
 	public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theCtx) {
 		theFactory.setJpaDialect(hibernateJpaDialect(theCtx.getLocalizer()));
 		theFactory.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");

@@ -202,9 +207,5 @@ public abstract class BaseConfig implements SchedulingConfigurer {
 		return new HapiFhirHibernateJpaDialect(theLocalizer);
 	}
 
-	@Bean
-	public IConsentContextServices consentContextServices() {
-		return new JpaConsentContextServices();
-	}
 
 }
@@ -968,6 +968,7 @@ public class DaoConfig {
 	 * and other FHIR features may not behave as expected when referential integrity is not
 	 * preserved. Use this feature with caution.
 	 * </p>
+	 * @see ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor
 	 */
 	public boolean isEnforceReferentialIntegrityOnDelete() {
 		return myEnforceReferentialIntegrityOnDelete;

@@ -981,6 +982,7 @@ public class DaoConfig {
 	 * and other FHIR features may not behave as expected when referential integrity is not
 	 * preserved. Use this feature with caution.
 	 * </p>
+	 * @see ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor
 	 */
 	public void setEnforceReferentialIntegrityOnDelete(boolean theEnforceReferentialIntegrityOnDelete) {
 		myEnforceReferentialIntegrityOnDelete = theEnforceReferentialIntegrityOnDelete;
@@ -53,4 +53,9 @@ public class DeleteConflictList {
 	public int size() {
 		return myList.size();
 	}
+
+	@Override
+	public String toString() {
+		return myList.toString();
+	}
 }
@@ -0,0 +1,19 @@
+package ca.uhn.fhir.jpa.delete;
+
+import org.apache.commons.lang3.Validate;
+
+public class DeleteConflictOutcome {
+
+	private int myShouldRetryCount;
+
+	public int getShouldRetryCount() {
+		return myShouldRetryCount;
+	}
+
+	public DeleteConflictOutcome setShouldRetryCount(int theShouldRetryCount) {
+		Validate.isTrue(theShouldRetryCount >= 0, "theShouldRetryCount must not be negative");
+		myShouldRetryCount = theShouldRetryCount;
+		return this;
+	}
+
+}
@@ -69,31 +69,33 @@ public class DeleteConflictService {
 	// In most cases, there will be no hooks, and so we only need to check if there is at least FIRST_QUERY_RESULT_COUNT conflict and populate that.
 	// Only in the case where there is a hook do we need to go back and collect larger batches of conflicts for processing.
 
-	boolean tryAgain = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, FIRST_QUERY_RESULT_COUNT);
+	DeleteConflictOutcome outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, FIRST_QUERY_RESULT_COUNT);
 
 	int retryCount = 0;
-	while (tryAgain && retryCount < MAX_RETRY_ATTEMPTS) {
+	while (outcome != null) {
+		int shouldRetryCount = Math.min(outcome.getShouldRetryCount(), MAX_RETRY_ATTEMPTS);
+		if (!(retryCount < shouldRetryCount)) break;
 		newConflicts = new DeleteConflictList();
-		tryAgain = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, RETRY_QUERY_RESULT_COUNT);
+		outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, RETRY_QUERY_RESULT_COUNT);
 		++retryCount;
 	}
 	theDeleteConflicts.addAll(newConflicts);
 	return retryCount;
 }
 
-private boolean findAndHandleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, int theMinQueryResultCount) {
+private DeleteConflictOutcome findAndHandleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, int theMinQueryResultCount) {
 	List<ResourceLink> resultList = myDeleteConflictFinderService.findConflicts(theEntity, theMinQueryResultCount);
 	if (resultList.isEmpty()) {
-		return false;
+		return null;
 	}
 	return handleConflicts(theRequest, theDeleteConflicts, theEntity, theForValidate, resultList);
 }
 
-private boolean handleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, List<ResourceLink> theResultList) {
+private DeleteConflictOutcome handleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, List<ResourceLink> theResultList) {
 	if (!myDaoConfig.isEnforceReferentialIntegrityOnDelete() && !theForValidate) {
 		ourLog.debug("Deleting {} resource dependencies which can no longer be satisfied", theResultList.size());
 		myResourceLinkDao.deleteAll(theResultList);
-		return false;
+		return null;
 	}
 
 	addConflictsToList(theDeleteConflicts, theEntity, theResultList);
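Note (editorial addition): the reworked loop honours the retry count requested by the hook but clamps it to MAX_RETRY_ATTEMPTS, so a misbehaving interceptor cannot cause an infinite loop. A tiny illustration of the clamping follows; the constant value is assumed for illustration only, since the real MAX_RETRY_ATTEMPTS is defined elsewhere in DeleteConflictService and is not shown in this excerpt.

```java
public class RetryCapIllustration {
	public static void main(String[] args) {
		int maxRetryAttempts = 10;                 // assumed stand-in for DeleteConflictService.MAX_RETRY_ATTEMPTS
		int requestedByHook = Integer.MAX_VALUE;   // e.g. a badly behaved interceptor asking to retry forever
		int effectiveCap = Math.min(requestedByHook, maxRetryAttempts);
		// The service only loops while retryCount < effectiveCap, so at most 10 re-checks happen here
		System.out.println("Effective retry cap: " + effectiveCap);
	}
}
```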
@@ -103,7 +105,7 @@ public class DeleteConflictService {
 		.add(DeleteConflictList.class, theDeleteConflicts)
 		.add(RequestDetails.class, theRequest)
 		.addIfMatchesType(ServletRequestDetails.class, theRequest);
-	return JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, hooks);
+	return (DeleteConflictOutcome)JpaInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, hooks);
 }
 
 private void addConflictsToList(DeleteConflictList theDeleteConflicts, ResourceTable theEntity, List<ResourceLink> theResultList) {
@@ -0,0 +1,178 @@
+package ca.uhn.fhir.jpa.interceptor;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.api.*;
+import ca.uhn.fhir.jpa.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.delete.DeleteConflictList;
+import ca.uhn.fhir.jpa.delete.DeleteConflictOutcome;
+import ca.uhn.fhir.jpa.util.DeleteConflict;
+import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.ResponseDetails;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.util.OperationOutcomeUtil;
+import org.apache.commons.lang3.Validate;
+import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.r4.model.OperationOutcome;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+import static ca.uhn.fhir.jpa.delete.DeleteConflictService.MAX_RETRY_ATTEMPTS;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+/**
+ * Interceptor that allows for cascading deletes (deletes that resolve constraint issues).
+ * <p>
+ * For example, if <code>DiagnosticReport/A</code> has a reference to <code>Observation/B</code>
+ * it is not normally possible to delete <code>Observation/B</code> without first deleting
+ * <code>DiagnosticReport/A</code>. With this interceptor in place, it is.
+ * </p>
+ * <p>
+ * When using this interceptor, client requests must include the parameter
+ * <code>_cascade=true</code> on the DELETE URL in order to activate
+ * cascading delete, or include the request header <code>X-Cascade-Delete: true</code>
+ * </p>
+ */
+@Interceptor
+public class CascadingDeleteInterceptor {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(CascadingDeleteInterceptor.class);
+	private static final String CASCADED_DELETES_KEY = CascadingDeleteInterceptor.class.getName() + "_CASCADED_DELETES_KEY";
+	private static final String CASCADED_DELETES_FAILED_KEY = CascadingDeleteInterceptor.class.getName() + "_CASCADED_DELETES_FAILED_KEY";
+
+	private final DaoRegistry myDaoRegistry;
+	private final IInterceptorBroadcaster myInterceptorBroadcaster;
+
+	/**
+	 * Constructor
+	 *
+	 * @param theDaoRegistry The DAO registry (must not be null)
+	 */
+	public CascadingDeleteInterceptor(DaoRegistry theDaoRegistry, IInterceptorBroadcaster theInterceptorBroadcaster) {
+		Validate.notNull(theDaoRegistry, "theDaoRegistry must not be null");
+		Validate.notNull(theInterceptorBroadcaster, "theInterceptorBroadcaster must not be null");
+		myDaoRegistry = theDaoRegistry;
+		myInterceptorBroadcaster = theInterceptorBroadcaster;
+	}
+
+	@Hook(Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS)
+	public DeleteConflictOutcome handleDeleteConflicts(DeleteConflictList theConflictList, RequestDetails theRequest) {
+		ourLog.debug("Have delete conflicts: {}", theConflictList);
+
+		if (!shouldCascade(theRequest)) {
+			return null;
+		}
+
+		List<String> cascadedDeletes = getCascadedDeletesMap(theRequest, true);
+		for (Iterator<DeleteConflict> iter = theConflictList.iterator(); iter.hasNext(); ) {
+			DeleteConflict next = iter.next();
+			IdDt nextSource = next.getSourceId();
+			IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextSource.getResourceType());
+
+			// Interceptor call: STORAGE_CASCADE_DELETE
+			IBaseResource resource = dao.read(nextSource);
+			HookParams params = new HookParams()
+				.add(RequestDetails.class, theRequest)
+				.addIfMatchesType(ServletRequestDetails.class, theRequest)
+				.add(DeleteConflictList.class, theConflictList)
+				.add(IBaseResource.class, resource);
+			JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_CASCADE_DELETE, params);
+
+			// Actually perform the delete
+			ourLog.info("Have delete conflict {} - Cascading delete", next);
+			dao.delete(nextSource, theRequest);
+
+			cascadedDeletes.add(nextSource.getValue());
+		}
+
+		return new DeleteConflictOutcome().setShouldRetryCount(MAX_RETRY_ATTEMPTS);
+	}
+
+	@SuppressWarnings("unchecked")
+	private List<String> getCascadedDeletesMap(RequestDetails theRequest, boolean theCreate) {
+		List<String> retVal = (List<String>) theRequest.getUserData().get(CASCADED_DELETES_KEY);
+		if (retVal == null) {
+			retVal = new ArrayList<>();
+			theRequest.getUserData().put(CASCADED_DELETES_KEY, retVal);
+		}
+		return retVal;
+	}
+
+	@Hook(Pointcut.SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME)
+	public void outgoingFailureOperationOutcome(RequestDetails theRequestDetails, IBaseOperationOutcome theResponse) {
+		if (theRequestDetails != null) {
+
+			String failedDeleteMessage = (String) theRequestDetails.getUserData().get(CASCADED_DELETES_FAILED_KEY);
+			if (isNotBlank(failedDeleteMessage)) {
+				FhirContext ctx = theRequestDetails.getFhirContext();
+				String severity = OperationOutcome.IssueSeverity.INFORMATION.toCode();
+				String code = OperationOutcome.IssueType.INFORMATIONAL.toCode();
+				String details = failedDeleteMessage;
+				OperationOutcomeUtil.addIssue(ctx, theResponse, severity, details, null, code);
+			}
+
+		}
+	}
+
+	@Hook(Pointcut.SERVER_OUTGOING_RESPONSE)
+	public void outgoingResponse(RequestDetails theRequestDetails, ResponseDetails theResponseDetails, IBaseResource theResponse) {
+		if (theRequestDetails != null) {
+
+			// Successful delete list
+			List<String> deleteList = getCascadedDeletesMap(theRequestDetails, false);
+			if (deleteList != null) {
+				if (theResponseDetails.getResponseCode() == 200) {
+					if (theResponse instanceof IBaseOperationOutcome) {
+						FhirContext ctx = theRequestDetails.getFhirContext();
+						IBaseOperationOutcome oo = (IBaseOperationOutcome) theResponse;
+						String severity = OperationOutcome.IssueSeverity.INFORMATION.toCode();
+						String code = OperationOutcome.IssueType.INFORMATIONAL.toCode();
+						String details = ctx.getLocalizer().getMessage(CascadingDeleteInterceptor.class, "successMsg", deleteList.size(), deleteList);
+						OperationOutcomeUtil.addIssue(ctx, oo, severity, details, null, code);
+					}
+				}
+			}
+
+		}
+	}
+
+	/**
+	 * Subclasses may override
+	 *
+	 * @param theRequest The REST request
+	 * @return Returns true if cascading delete should be allowed
+	 */
+	@SuppressWarnings("WeakerAccess")
+	protected boolean shouldCascade(RequestDetails theRequest) {
+		if (theRequest != null) {
+			String[] cascadeParameters = theRequest.getParameters().get(Constants.PARAMETER_CASCADE_DELETE);
+			if (cascadeParameters != null && Arrays.asList(cascadeParameters).contains("true")) {
+				return true;
+			}
+
+			String cascadeHeader = theRequest.getHeader(Constants.HEADER_CASCADE_DELETE);
+			if ("true".equals(cascadeHeader)) {
+				return true;
+			}
+
+			// Add a message to the response
+			String message = theRequest.getFhirContext().getLocalizer().getMessage(CascadingDeleteInterceptor.class, "noParam");
+			theRequest.getUserData().put(CASCADED_DELETES_FAILED_KEY, message);
+		}
+
+		return false;
+	}
+
+}
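Note (editorial addition): a wiring sketch showing how the new interceptor might be registered on a JPA RestfulServer, modelled on the registration call used in the tests below. The configuration class, method, and variable names are hypothetical; the DaoRegistry and IInterceptorBroadcaster are assumed to come from the existing JPA Spring context. Clients then opt in per request with ?_cascade=true or the X-Cascade-Delete: true header; without one of these the interceptor returns null and the normal referential-integrity failure is returned, with a hint added to the OperationOutcome.

```java
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.rest.server.RestfulServer;

public class CascadingDeleteConfig {

	public void configure(RestfulServer restfulServer, DaoRegistry daoRegistry, IInterceptorBroadcaster broadcaster) {
		// Register the interceptor so DELETE requests carrying _cascade=true or the
		// X-Cascade-Delete header can resolve referential-integrity conflicts by deleting
		// the referencing resources first
		CascadingDeleteInterceptor interceptor = new CascadingDeleteInterceptor(daoRegistry, broadcaster);
		restfulServer.getInterceptorService().registerInterceptor(interceptor);
	}
}
```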
@@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.util;
  */
 
 import ca.uhn.fhir.model.primitive.IdDt;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
 
 public class DeleteConflict {
 

@@ -46,4 +48,13 @@ public class DeleteConflict {
 		return myTargetId;
 	}
 
+	@Override
+	public String toString() {
+		return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
+			.append("sourceId", mySourceId)
+			.append("sourcePath", mySourcePath)
+			.append("targetId", myTargetId)
+			.toString();
+	}
+
 }
@@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.config;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil;
-import ca.uhn.fhir.jpa.subscription.module.config.UnregisterScheduledProcessor;
+import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
 import ca.uhn.fhir.jpa.subscription.module.subscriber.SubscriptionDeliveringRestHookSubscriber;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.config.TestR4Config;
 import ca.uhn.fhir.jpa.dao.*;
 import ca.uhn.fhir.jpa.dao.data.*;
 import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;
+import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
 import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
 import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;

@@ -160,7 +161,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
 	@Autowired
 	@Qualifier("myRiskAssessmentDaoR4")
 	protected IFhirResourceDao<RiskAssessment> myRiskAssessmentDao;
-	protected IServerInterceptor myInterceptor;
 	@Autowired
 	protected IInterceptorService myInterceptorRegistry;
 	@Autowired
@@ -298,6 +298,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
 	private JpaValidationSupportChainR4 myJpaValidationSupportChainR4;
 	private PerformanceTracingLoggingInterceptor myPerformanceTracingLoggingInterceptor;
 	private List<Object> mySystemInterceptors;
+	protected IServerInterceptor myInterceptor;
 
 	@After()
 	public void afterCleanupDao() {
@@ -9,7 +9,7 @@ import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam;
 import ca.uhn.fhir.jpa.searchparam.SearchParamConstants;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
-import ca.uhn.fhir.jpa.subscription.module.config.UnregisterScheduledProcessor;
+import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.param.DateParam;
 import ca.uhn.fhir.rest.param.ReferenceParam;
@@ -40,7 +40,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
 	}
 
 	@Test
-	public void testDeleteFailCallsHook() throws Exception {
+	public void testDeleteFailCallsHook() {
 		Organization organization = new Organization();
 		organization.setName("FOO");
 		IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();

@@ -49,7 +49,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
 		patient.setManagingOrganization(new Reference(organizationId));
 		IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
 
-		myDeleteInterceptor.deleteConflictFunction = list -> false;
+		myDeleteInterceptor.deleteConflictFunction = t -> new DeleteConflictOutcome().setShouldRetryCount(0);
 		try {
 			myOrganizationDao.delete(organizationId);
 			fail();

@@ -64,7 +64,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
 	}
 
 	@Test
-	public void testDeleteHookDeletesConflict() throws Exception {
+	public void testDeleteHookDeletesConflict() {
 		Organization organization = new Organization();
 		organization.setName("FOO");
 		IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();

@@ -82,7 +82,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
 	}
 
 	@Test
-	public void testDeleteHookDeletesTwoConflicts() throws Exception {
+	public void testDeleteHookDeletesTwoConflicts() {
 		Organization organization = new Organization();
 		organization.setName("FOO");
 		IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();

@@ -104,7 +104,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
 	}
 
 	@Test
-	public void testDeleteHookDeletesThreeConflicts() throws Exception {
+	public void testDeleteHookDeletesThreeConflicts() {
 		Organization organization = new Organization();
 		organization.setName("FOO");
 		IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();

@@ -130,7 +130,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
 	}
 
 	@Test
-	public void testBadInterceptorNoInfiniteLoop() throws Exception {
+	public void testBadInterceptorNoInfiniteLoop() {
 		Organization organization = new Organization();
 		organization.setName("FOO");
 		IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();

@@ -140,7 +140,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
 		IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
 
 		// Always returning true is bad behaviour. Our infinite loop checker should halt it
-		myDeleteInterceptor.deleteConflictFunction = list -> true;
+		myDeleteInterceptor.deleteConflictFunction = t -> new DeleteConflictOutcome().setShouldRetryCount(Integer.MAX_VALUE);
 
 		try {
 			myOrganizationDao.delete(organizationId);

@@ -151,7 +151,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
 		assertEquals(1 + DeleteConflictService.MAX_RETRY_ATTEMPTS, myDeleteInterceptor.myCallCount);
 	}
 
-	private boolean deleteConflicts(DeleteConflictList theList) {
+	private DeleteConflictOutcome deleteConflicts(DeleteConflictList theList) {
 		Iterator<DeleteConflict> iterator = theList.iterator();
 		while (iterator.hasNext()) {
 			DeleteConflict next = iterator.next();

@@ -162,16 +162,16 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
 				++myInterceptorDeleteCount;
 			}
 		}
-		return myInterceptorDeleteCount > 0;
+		return new DeleteConflictOutcome().setShouldRetryCount(myInterceptorDeleteCount);
 	}
 
 	private static class DeleteConflictInterceptor {
 		int myCallCount;
 		DeleteConflictList myDeleteConflictList;
-		Function<DeleteConflictList, Boolean> deleteConflictFunction;
+		Function<DeleteConflictList, DeleteConflictOutcome> deleteConflictFunction;
 
 		@Hook(Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS)
-		public boolean deleteConflicts(DeleteConflictList theDeleteConflictList) {
+		public DeleteConflictOutcome deleteConflicts(DeleteConflictList theDeleteConflictList) {
 			++myCallCount;
 			myDeleteConflictList = theDeleteConflictList;
 			return deleteConflictFunction.apply(theDeleteConflictList);
@@ -8,8 +8,6 @@ import ca.uhn.fhir.jpa.model.entity.ResourceLink;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.mock.mockito.MockBean;
 import org.springframework.context.annotation.Bean;

@@ -27,15 +25,20 @@ import static org.mockito.Mockito.when;
 @RunWith(SpringRunner.class)
 @ContextConfiguration(classes = {DeleteConflictServiceTest.SpringConfig.class})
 public class DeleteConflictServiceTest {
-	private static final Logger ourLog = LoggerFactory.getLogger(DeleteConflictServiceTest.class);
 
 	@MockBean
 	private DeleteConflictFinderService myDeleteConflictFinderService;
+	/**
+	 * This is needed, don't remove
+	 */
+	@SuppressWarnings("unused")
 	@MockBean
 	private IResourceLinkDao myResourceLinkDao;
+	@SuppressWarnings("unused")
 	@MockBean
 	private FhirContext myFhirContext;
 	@MockBean
+	@SuppressWarnings("unused")
 	private IInterceptorBroadcaster myInterceptorBroadcaster;
 
 	@Autowired
@@ -1,5 +1,6 @@
 package ca.uhn.fhir.jpa.provider.r4;

+import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.MethodOutcome;
@@ -424,6 +425,134 @@ public class AuthorizationInterceptorResourceProviderR4Test extends BaseResource
 		assertEquals(id.getValue(), patient.getId());
 	}

+	@Test
+	public void testDeleteCascadeBlocked() {
+		CascadingDeleteInterceptor cascadingDeleteInterceptor = new CascadingDeleteInterceptor(myDaoRegistry, myInterceptorRegistry);
+		ourRestServer.getInterceptorService().registerInterceptor(cascadingDeleteInterceptor);
+		try {
+
+			// Create Patient, and Observation that refers to it
+			Patient patient = new Patient();
+			patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100");
+			patient.addName().setFamily("Tester").addGiven("Raghad");
+			final IIdType patientId = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
+
+			Observation obs = new Observation();
+			obs.setStatus(ObservationStatus.FINAL);
+			obs.getSubject().setReferenceElement(patientId);
+			ourClient.create().resource(obs).execute();
+
+			// Allow any deletes, but don't allow cascade
+			ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
+				@Override
+				public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
+					return new RuleBuilder()
+						.allow().delete().allResources().withAnyId().andThen()
+						.build();
+				}
+			});
+
+			try {
+				ourClient
+					.delete()
+					.resourceById(patientId)
+					.withAdditionalHeader(Constants.HEADER_CASCADE_DELETE, "true")
+					.execute();
+				fail();
+			} catch (ForbiddenOperationException e) {
+				// good
+			}
+
+		} finally {
+			ourRestServer.getInterceptorService().unregisterInterceptor(cascadingDeleteInterceptor);
+		}
+	}
+
+	@Test
+	public void testDeleteCascadeAllowed() {
+		CascadingDeleteInterceptor cascadingDeleteInterceptor = new CascadingDeleteInterceptor(myDaoRegistry, myInterceptorRegistry);
+		ourRestServer.getInterceptorService().registerInterceptor(cascadingDeleteInterceptor);
+		try {
+
+			// Create Patient, and Observation that refers to it
+			Patient patient = new Patient();
+			patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100");
+			patient.addName().setFamily("Tester").addGiven("Raghad");
+			final IIdType patientId = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
+
+			Observation obs = new Observation();
+			obs.setStatus(ObservationStatus.FINAL);
+			obs.getSubject().setReferenceElement(patientId);
+			ourClient.create().resource(obs).execute();
+
+			// Allow any deletes, including cascaded deletes
+			ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
+				@Override
+				public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
+					return new RuleBuilder()
+						.allow().delete().allResources().withAnyId().andThen()
+						.allow().delete().onCascade().allResources().withAnyId().andThen()
+						.build();
+				}
+			});
+
+			ourClient
+				.delete()
+				.resourceById(patientId)
+				.withAdditionalHeader(Constants.HEADER_CASCADE_DELETE, "true")
+				.execute();
+
+		} finally {
+			ourRestServer.getInterceptorService().unregisterInterceptor(cascadingDeleteInterceptor);
+		}
+	}
+
+	@Test
+	public void testDeleteCascadeAllowed_ButNotOnTargetType() {
+		CascadingDeleteInterceptor cascadingDeleteInterceptor = new CascadingDeleteInterceptor(myDaoRegistry, myInterceptorRegistry);
+		ourRestServer.getInterceptorService().registerInterceptor(cascadingDeleteInterceptor);
+		try {
+
+			// Create Patient, and Observation that refers to it
+			Patient patient = new Patient();
+			patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100");
+			patient.addName().setFamily("Tester").addGiven("Raghad");
+			final IIdType patientId = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
+
+			Observation obs = new Observation();
+			obs.setStatus(ObservationStatus.FINAL);
+			obs.getSubject().setReferenceElement(patientId);
+			ourClient.create().resource(obs).execute();
+
+			// Allow deletes of Patient and Observation, but only allow cascades to Patient
+			ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
+				@Override
+				public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
+					return new RuleBuilder()
+						.allow().delete().resourcesOfType(Patient.class).withAnyId().andThen()
+						.allow().delete().resourcesOfType(Observation.class).withAnyId().andThen()
+						.allow().delete().onCascade().resourcesOfType(Patient.class).withAnyId().andThen()
+						.build();
+				}
+			});
+
+			try {
+				ourClient
+					.delete()
+					.resourceById(patientId)
+					.withAdditionalHeader(Constants.HEADER_CASCADE_DELETE, "true")
+					.execute();
+				fail();
+			} catch (ForbiddenOperationException e) {
+				// good
+			}
+
+		} finally {
+			ourRestServer.getInterceptorService().unregisterInterceptor(cascadingDeleteInterceptor);
+		}
+	}
+
 	@Test
 	public void testDeleteResourceConditional() throws IOException {
 		String methodName = "testDeleteResourceConditional";
@@ -1,8 +1,11 @@
 package ca.uhn.fhir.jpa.provider.r4;

+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.jpa.config.WebsocketDispatcherConfig;
+import ca.uhn.fhir.jpa.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.dao.data.ISearchDao;
 import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
+import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
 import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
 import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryR4;
@@ -75,6 +78,8 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {

 	@Autowired
 	protected SubscriptionLoader mySubscriptionLoader;
+	@Autowired
+	protected DaoRegistry myDaoRegistry;

 	public BaseResourceProviderR4Test() {
 		super();
@@ -101,6 +106,7 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {

 			myTerminologyUploaderProvider = myAppCtx.getBean(TerminologyUploaderProviderR4.class);
 			ourGraphQLProvider = myAppCtx.getBean("myGraphQLProvider");
+			myDaoRegistry = myAppCtx.getBean(DaoRegistry.class);

 			ourRestServer.registerProviders(mySystemProvider, myTerminologyUploaderProvider, ourGraphQLProvider);
@@ -0,0 +1,133 @@
+package ca.uhn.fhir.jpa.provider.r4;
+
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.jpa.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
+import ca.uhn.fhir.util.TestUtil;
+import com.google.common.base.Charsets;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpDelete;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.DiagnosticReport;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Patient;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.io.IOException;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.*;
+
+public class CascadingDeleteInterceptorR4Test extends BaseResourceProviderR4Test {
+
+	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CascadingDeleteInterceptorR4Test.class);
+	private IIdType myDiagnosticReportId;
+
+	@Autowired
+	private DaoRegistry myDaoRegistry;
+	@Autowired
+	private IInterceptorBroadcaster myInterceptorBroadcaster;
+
+	private IIdType myPatientId;
+	private CascadingDeleteInterceptor myDeleteInterceptor;
+	private IIdType myObservationId;
+
+	@Override
+	@Before
+	public void before() throws Exception {
+		super.before();
+
+		myDeleteInterceptor = new CascadingDeleteInterceptor(myDaoRegistry, myInterceptorBroadcaster);
+	}
+
+	@Override
+	@After
+	public void after() throws Exception {
+		super.after();
+		ourRestServer.getInterceptorService().unregisterInterceptor(myDeleteInterceptor);
+	}
+
+	public void createResources() {
+		Patient p = new Patient();
+		p.setActive(true);
+		myPatientId = ourClient.create().resource(p).execute().getId().toUnqualifiedVersionless();
+
+		Observation o = new Observation();
+		o.setStatus(Observation.ObservationStatus.FINAL);
+		o.getSubject().setReference(myPatientId.getValue());
+		myObservationId = ourClient.create().resource(o).execute().getId().toUnqualifiedVersionless();
+
+		DiagnosticReport dr = new DiagnosticReport();
+		dr.setStatus(DiagnosticReport.DiagnosticReportStatus.FINAL);
+		dr.addResult().setReference(myObservationId.getValue());
+		myDiagnosticReportId = ourClient.create().resource(dr).execute().getId().toUnqualifiedVersionless();
+	}
+
+	@Test
+	public void testDeleteWithNoInterceptorAndConstraints() {
+		createResources();
+
+		try {
+			ourClient.delete().resourceById(myPatientId).execute();
+			fail();
+		} catch (ResourceVersionConflictException e) {
+			// good
+			ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()));
+		}
+	}
+
+	@Test
+	public void testDeleteWithInterceptorAndConstraints() {
+		createResources();
+
+		ourRestServer.getInterceptorService().registerInterceptor(myDeleteInterceptor);
+
+		try {
+			ourClient.delete().resourceById(myPatientId).execute();
+			fail();
+		} catch (ResourceVersionConflictException e) {
+			String output = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome());
+			ourLog.info(output);
+			assertThat(output, containsString("Note that cascading deletes are not active for this request. You can enable cascading deletes"));
+		}
+	}
+
+	@Test
+	public void testDeleteCascading() throws IOException {
+		createResources();
+
+		ourRestServer.getInterceptorService().registerInterceptor(myDeleteInterceptor);
+
+		HttpDelete delete = new HttpDelete(ourServerBase + "/" + myPatientId.getValue() + "?_cascade=true&_pretty=true");
+		delete.addHeader(Constants.HEADER_ACCEPT, Constants.CT_FHIR_JSON_NEW);
+		try (CloseableHttpResponse response = ourHttpClient.execute(delete)) {
+			assertEquals(200, response.getStatusLine().getStatusCode());
+			String deleteResponse = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
+			ourLog.info("Response: {}", deleteResponse);
+			assertThat(deleteResponse, containsString("Cascaded delete to 2 resources: [" + myDiagnosticReportId + "/_history/1, " + myObservationId + "/_history/1]"));
+		}
+
+		try {
+			ourLog.info("Reading {}", myPatientId);
+			ourClient.read().resource(Patient.class).withId(myPatientId).execute();
+			fail();
+		} catch (ResourceGoneException e) {
+			// good
+		}
+	}
+
+	@AfterClass
+	public static void afterClassClearContext() {
+		TestUtil.clearAllStaticFieldsForUnitTest();
+	}
+
+}
@@ -4,7 +4,7 @@ import ca.uhn.fhir.jpa.config.TestR4Config;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
-import ca.uhn.fhir.jpa.subscription.module.config.UnregisterScheduledProcessor;
+import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.param.TokenOrListParam;
@@ -5,6 +5,7 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.subscription.module.matcher.ISubscriptionMatcher;
 import ca.uhn.fhir.jpa.subscription.module.matcher.InMemorySubscriptionMatcher;
 import ca.uhn.fhir.rest.client.api.IGenericClient;
+import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
 import org.mockito.Mockito;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -1,9 +1,12 @@
 package ca.uhn.fhirtest;

 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.jpa.config.WebsocketDispatcherConfig;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
+import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
 import ca.uhn.fhir.jpa.provider.JpaConformanceProviderDstu2;
 import ca.uhn.fhir.jpa.provider.JpaSystemProviderDstu2;
 import ca.uhn.fhir.jpa.provider.dstu3.JpaConformanceProviderDstu3;
@@ -218,6 +221,15 @@ public class TestRestfulServer extends RestfulServer {
 		 */
 		SubscriptionInterceptorLoader subscriptionInterceptorLoader = myAppCtx.getBean(SubscriptionInterceptorLoader.class);
 		subscriptionInterceptorLoader.registerInterceptors();
+
+		/*
+		 * Cascading deletes
+		 */
+		DaoRegistry daoRegistry = myAppCtx.getBean(DaoRegistry.class);
+		IInterceptorBroadcaster interceptorBroadcaster = myAppCtx.getBean(IInterceptorBroadcaster.class);
+		CascadingDeleteInterceptor cascadingDeleteInterceptor = new CascadingDeleteInterceptor(daoRegistry, interceptorBroadcaster);
+		registerInterceptor(cascadingDeleteInterceptor);
+
 	}

 	/**
@@ -34,6 +34,7 @@ import javax.servlet.http.HttpServletResponse;

 import ca.uhn.fhir.parser.DataFormatException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding;
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;

@@ -94,6 +95,7 @@ public class ExceptionHandlingInterceptor extends InterceptorAdapter {
 			}
 		}

+		BaseResourceReturningMethodBinding.callOutgoingFailureOperationOutcomeHook(theRequestDetails, oo);
 		return response.streamResponseAsResource(oo, true, Collections.singleton(SummaryEnum.FALSE), statusCode, statusMessage, false, false);

 	}
@@ -16,15 +16,16 @@ import ca.uhn.fhir.rest.server.RestfulServerUtils.ResponseEncoding;
 import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
 import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding;
 import ca.uhn.fhir.util.StopWatch;
 import ca.uhn.fhir.util.UrlUtil;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.text.StringEscapeUtils;
 import org.hl7.fhir.instance.model.api.IBaseBinary;
+import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
 import org.hl7.fhir.instance.model.api.IBaseResource;

-import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
@@ -235,8 +236,7 @@ public class ResponseHighlighterInterceptor {
 	}

 	@Hook(value = Pointcut.SERVER_HANDLE_EXCEPTION, order = InterceptorOrders.RESPONSE_HIGHLIGHTER_INTERCEPTOR)
-	public boolean handleException(RequestDetails theRequestDetails, BaseServerResponseException theException, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse)
-		throws ServletException, IOException {
+	public boolean handleException(RequestDetails theRequestDetails, BaseServerResponseException theException, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) {
 		/*
 		 * It's not a browser...
 		 */
@@ -260,11 +260,17 @@ public class ResponseHighlighterInterceptor {
 			return true;
 		}

-		if (theException.getOperationOutcome() == null) {
+		IBaseOperationOutcome oo = theException.getOperationOutcome();
+		if (oo == null) {
 			return true;
 		}

-		streamResponse(theRequestDetails, theServletResponse, theException.getOperationOutcome(), theServletRequest, theException.getStatusCode());
+		ResponseDetails responseDetails = new ResponseDetails();
+		responseDetails.setResponseResource(oo);
+		responseDetails.setResponseCode(theException.getStatusCode());
+
+		BaseResourceReturningMethodBinding.callOutgoingFailureOperationOutcomeHook(theRequestDetails, oo);
+		streamResponse(theRequestDetails, theServletResponse, responseDetails.getResponseResource(), theServletRequest, responseDetails.getResponseCode());

 		return false;
 	}
@@ -86,8 +86,8 @@ public class AuthorizationInterceptor implements IRuleApplier {
 	}

 	private void applyRulesAndFailIfDeny(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId,
-													 IBaseResource theOutputResource) {
-		Verdict decision = applyRulesAndReturnDecision(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource);
+													 IBaseResource theOutputResource, Pointcut thePointcut) {
+		Verdict decision = applyRulesAndReturnDecision(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, thePointcut);

 		if (decision.getDecision() == PolicyEnum.ALLOW) {
 			return;
@@ -98,7 +98,7 @@ public class AuthorizationInterceptor implements IRuleApplier {

 	@Override
 	public Verdict applyRulesAndReturnDecision(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId,
-															 IBaseResource theOutputResource) {
+															 IBaseResource theOutputResource, Pointcut thePointcut) {
 		@SuppressWarnings("unchecked")
 		List<IAuthRule> rules = (List<IAuthRule>) theRequestDetails.getUserData().get(myRequestRuleListKey);
 		if (rules == null) {
@@ -110,7 +110,7 @@ public class AuthorizationInterceptor implements IRuleApplier {

 		Verdict verdict = null;
 		for (IAuthRule nextRule : rules) {
-			verdict = nextRule.applyRule(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, this, flags);
+			verdict = nextRule.applyRule(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, this, flags, thePointcut);
 			if (verdict != null) {
 				ourLog.trace("Rule {} returned decision {}", nextRule, verdict.getDecision());
 				break;
@@ -285,12 +285,12 @@ public class AuthorizationInterceptor implements IRuleApplier {
 		throw new ForbiddenOperationException("Access denied by default policy (no applicable rules)");
 	}

-	private void handleUserOperation(RequestDetails theRequest, IBaseResource theResource, RestOperationTypeEnum operation) {
-		applyRulesAndFailIfDeny(operation, theRequest, theResource, theResource.getIdElement(), null);
+	private void handleUserOperation(RequestDetails theRequest, IBaseResource theResource, RestOperationTypeEnum theOperation, Pointcut thePointcut) {
+		applyRulesAndFailIfDeny(theOperation, theRequest, theResource, theResource.getIdElement(), null, thePointcut);
 	}

 	@Hook(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED)
-	public void incomingRequestPreHandled(RequestDetails theRequest) {
+	public void incomingRequestPreHandled(RequestDetails theRequest, Pointcut thePointcut) {
 		IBaseResource inputResource = null;
 		IIdType inputResourceId = null;

@@ -308,23 +308,33 @@ public class AuthorizationInterceptor implements IRuleApplier {
 			return;
 		}

-		applyRulesAndFailIfDeny(theRequest.getRestOperationType(), theRequest, inputResource, inputResourceId, null);
+		applyRulesAndFailIfDeny(theRequest.getRestOperationType(), theRequest, inputResource, inputResourceId, null, thePointcut);
 	}

 	@Hook(Pointcut.STORAGE_PRESHOW_RESOURCES)
-	public void hookPreShow(RequestDetails theRequestDetails, IPreResourceShowDetails theDetails) {
+	public void hookPreShow(RequestDetails theRequestDetails, IPreResourceShowDetails theDetails, Pointcut thePointcut) {
 		for (int i = 0; i < theDetails.size(); i++) {
 			IBaseResource next = theDetails.getResource(i);
-			checkOutgoingResourceAndFailIfDeny(theRequestDetails, next);
+			checkOutgoingResourceAndFailIfDeny(theRequestDetails, next, thePointcut);
 		}
 	}

 	@Hook(Pointcut.SERVER_OUTGOING_RESPONSE)
-	public void hookOutgoingResponse(RequestDetails theRequestDetails, IBaseResource theResponseObject) {
-		checkOutgoingResourceAndFailIfDeny(theRequestDetails, theResponseObject);
+	public void hookOutgoingResponse(RequestDetails theRequestDetails, IBaseResource theResponseObject, Pointcut thePointcut) {
+		checkOutgoingResourceAndFailIfDeny(theRequestDetails, theResponseObject, thePointcut);
 	}

-	private void checkOutgoingResourceAndFailIfDeny(RequestDetails theRequestDetails, IBaseResource theResponseObject) {
+	@Hook(Pointcut.STORAGE_CASCADE_DELETE)
+	public void hookCascadeDeleteForConflict(RequestDetails theRequestDetails, Pointcut thePointcut, IBaseResource theResourceToDelete) {
+		Validate.notNull(theResourceToDelete); // just in case
+		checkPointcutAndFailIfDeny(theRequestDetails, thePointcut, theResourceToDelete);
+	}
+
+	private void checkPointcutAndFailIfDeny(RequestDetails theRequestDetails, Pointcut thePointcut, IBaseResource theInputResource) {
+		applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, theInputResource, null, null, thePointcut);
+	}
+
+	private void checkOutgoingResourceAndFailIfDeny(RequestDetails theRequestDetails, IBaseResource theResponseObject, Pointcut thePointcut) {
 		switch (determineOperationDirection(theRequestDetails.getRestOperationType(), null)) {
 			case IN:
 			case NONE:
@@ -343,6 +353,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
 		FhirContext fhirContext = theRequestDetails.getServer().getFhirContext();
 		List<IBaseResource> resources = Collections.emptyList();

+		//noinspection EnumSwitchStatementWhichMissesCases
 		switch (theRequestDetails.getRestOperationType()) {
 			case SEARCH_SYSTEM:
 			case SEARCH_TYPE:
@@ -368,26 +379,26 @@ public class AuthorizationInterceptor implements IRuleApplier {
 		}

 		for (IBaseResource nextResponse : resources) {
-			applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, null, null, nextResponse);
+			applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, null, null, nextResponse, thePointcut);
 		}
 	}

 	@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED)
-	public void hookResourcePreCreate(RequestDetails theRequest, IBaseResource theResource) {
-		handleUserOperation(theRequest, theResource, RestOperationTypeEnum.CREATE);
+	public void hookResourcePreCreate(RequestDetails theRequest, IBaseResource theResource, Pointcut thePointcut) {
+		handleUserOperation(theRequest, theResource, RestOperationTypeEnum.CREATE, thePointcut);
 	}

 	@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_DELETED)
-	public void hookResourcePreDelete(RequestDetails theRequest, IBaseResource theResource) {
-		handleUserOperation(theRequest, theResource, RestOperationTypeEnum.DELETE);
+	public void hookResourcePreDelete(RequestDetails theRequest, IBaseResource theResource, Pointcut thePointcut) {
+		handleUserOperation(theRequest, theResource, RestOperationTypeEnum.DELETE, thePointcut);
 	}

 	@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED)
-	public void hookResourcePreUpdate(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) {
+	public void hookResourcePreUpdate(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource, Pointcut thePointcut) {
 		if (theOldResource != null) {
-			handleUserOperation(theRequest, theOldResource, RestOperationTypeEnum.UPDATE);
+			handleUserOperation(theRequest, theOldResource, RestOperationTypeEnum.UPDATE, thePointcut);
 		}
-		handleUserOperation(theRequest, theNewResource, RestOperationTypeEnum.UPDATE);
+		handleUserOperation(theRequest, theNewResource, RestOperationTypeEnum.UPDATE, thePointcut);
 	}

 	private enum OperationExamineDirection {
@@ -409,7 +420,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
 			myDecidingRule = theDecidingRule;
 		}

-		public IAuthRule getDecidingRule() {
+		IAuthRule getDecidingRule() {
 			return myDecidingRule;
 		}

@@ -433,10 +444,6 @@ public class AuthorizationInterceptor implements IRuleApplier {

 	}

-	private static UnsupportedOperationException failForDstu1() {
-		return new UnsupportedOperationException("Use of this interceptor on DSTU1 servers is not supportd");
-	}
-
 	static List<IBaseResource> toListOfResourcesAndExcludeContainer(IBaseResource theResponseObject, FhirContext fhirContext) {
 		if (theResponseObject == null) {
 			return Collections.emptyList();
@@ -20,12 +20,12 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
  * #L%
  */

-import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.hl7.fhir.instance.model.api.IIdType;
-
+import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;

 import java.util.Set;

@@ -41,22 +41,18 @@ public interface IAuthRule {
 	/**
 	 * Applies the rule and returns a policy decision, or <code>null</code> if the rule does not apply
 	 *
-	 * @param theOperation
-	 *           The operation type
-	 * @param theRequestDetails
-	 *           The request
-	 * @param theInputResource
-	 *           The resource being input by the client, or <code>null</code>
+	 * @param theOperation The operation type
+	 * @param theRequestDetails The request
+	 * @param theInputResource The resource being input by the client, or <code>null</code>
 	 * @param theInputResourceId TODO
-	 * @param theOutputResource
-	 *           The resource being returned by the server, or <code>null</code>
-	 * @param theRuleApplier
-	 *           The rule applying module (this can be used by rules to apply the rule set to
+	 * @param theOutputResource The resource being returned by the server, or <code>null</code>
+	 * @param theRuleApplier The rule applying module (this can be used by rules to apply the rule set to
 	 *           nested objects in the request, such as nested requests in a transaction)
-	 * @param theFlags
+	 * @param theFlags The flags configured in the authorization interceptor
+	 * @param thePointcut The pointcut hook that triggered this call
 	 * @return Returns a policy decision, or <code>null</code> if the rule does not apply
 	 */
-	Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags);
+	Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut);

 	/**
 	 * Returns a name for this rule, to be used in logs and error messages
@@ -39,7 +39,7 @@ public interface IAuthRuleBuilderRule {
 	/**
 	 * This rule applies to the FHIR delete operation
 	 */
-	IAuthRuleBuilderRuleOp delete();
+	IAuthRuleBuilderRuleOpDelete delete();

 	/**
 	 * This rule applies to <code>create</code> operations with a <code>conditional</code>
@@ -0,0 +1,33 @@
+package ca.uhn.fhir.rest.server.interceptor.auth;
+
+/*
+ * #%L
+ * HAPI FHIR - Server Framework
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public interface IAuthRuleBuilderRuleOpDelete extends IAuthRuleBuilderRuleOp {
+
+	/**
+	 * Specifies that this rule applies to cascading deletes as opposed to regular
+	 * deletes. Note that if you want to allow cascading deletes, you will typically
+	 * require at least two separate rules: one for the original source resource, and
+	 * one for the cascade.
+	 */
+	IAuthRuleBuilderRuleOp onCascade();
+
+}
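Note: as the javadoc above says, permitting cascading deletes will usually take one rule for the resource named in the DELETE request and a second rule, built with onCascade(), for the resources the cascade removes. A minimal sketch of such a rule list follows (illustrative only; ourRestServer stands in for whichever RestfulServer the interceptor is registered on, and all builder calls are the ones introduced in this change):

   // Two-rule pattern: the first rule covers the directly deleted resource,
   // the second covers resources deleted as part of the cascade.
   ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
      @Override
      public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
         return new RuleBuilder()
            .allow().delete().allResources().withAnyId().andThen()
            .allow().delete().onCascade().allResources().withAnyId().andThen()
            .build();
      }
   });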
@@ -20,6 +20,7 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
  * #L%
  */

+import ca.uhn.fhir.interceptor.api.Pointcut;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;

@@ -29,6 +30,6 @@ import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict

 public interface IRuleApplier {

-	Verdict applyRulesAndReturnDecision(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource);
+	Verdict applyRulesAndReturnDecision(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, Pointcut thePointcut);

 }
@@ -21,6 +21,7 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
  */

 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict;
@@ -80,7 +81,7 @@ class OperationRule extends BaseRule implements IAuthRule {
 	}

 	@Override
-	public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags) {
+	public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut) {
 		FhirContext ctx = theRequestDetails.getServer().getFhirContext();

 		if (isOtherTenant(theRequestDetails)) {
@@ -195,7 +195,7 @@ public class RuleBuilder implements IAuthRuleBuilder {
 		}

 		@Override
-		public IAuthRuleBuilderRuleOp delete() {
+		public IAuthRuleBuilderRuleOpDelete delete() {
 			return new RuleBuilderRuleOp(RuleOpEnum.DELETE);
 		}

@@ -300,10 +300,11 @@ public class RuleBuilder implements IAuthRuleBuilder {

 		}

-		private class RuleBuilderRuleOp implements IAuthRuleBuilderRuleOp {
+		private class RuleBuilderRuleOp implements IAuthRuleBuilderRuleOp, IAuthRuleBuilderRuleOpDelete {

 			private final RuleOpEnum myRuleOp;
 			private RuleBuilderRuleOpClassifier myInstancesBuilder;
+			private boolean myOnCascade;

 			public RuleBuilderRuleOp(RuleOpEnum theRuleOp) {
 				myRuleOp = theRuleOp;
@@ -350,6 +351,12 @@ public class RuleBuilder implements IAuthRuleBuilder {
 				return new RuleBuilderRuleOpClassifier(AppliesTypeEnum.TYPES, Collections.singleton(theType));
 			}

+			@Override
+			public IAuthRuleBuilderRuleOp onCascade() {
+				myOnCascade = true;
+				return this;
+			}
+
 			private class RuleBuilderRuleOpClassifier implements IAuthRuleBuilderRuleOpClassifier {

 				private final AppliesTypeEnum myAppliesTo;
@@ -389,6 +396,7 @@ public class RuleBuilder implements IAuthRuleBuilder {
 					myRule.setClassifierType(myClassifierType);
 					myRule.setClassifierCompartmentName(myInCompartmentName);
 					myRule.setClassifierCompartmentOwners(myInCompartmentOwners);
+					myRule.setAppliesToDeleteCascade(myOnCascade);
 					myRules.add(myRule);

 					return new RuleBuilderFinished(myRule);
@@ -20,6 +20,7 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
  * #L%
  */

+import ca.uhn.fhir.interceptor.api.Pointcut;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;

@@ -40,7 +41,7 @@ public class RuleImplConditional extends BaseRule implements IAuthRule {

 	@Override
 	public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource,
-									 IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags) {
+									 IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut) {

 		if (isOtherTenant(theRequestDetails)) {
 			return null;
@@ -3,11 +3,11 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.rest.api.QualifiedParamList;
 import ca.uhn.fhir.rest.api.RequestTypeEnum;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.param.ParameterUtil;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict;
@@ -59,11 +59,12 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
 	private RuleOpEnum myOp;
 	private TransactionAppliesToEnum myTransactionAppliesToOp;
 	private Collection<IIdType> myAppliesToInstances;
+	private boolean myAppliesToDeleteCascade;

 	/**
 	 * Constructor
 	 */
-	public RuleImplOp(String theRuleName) {
+	RuleImplOp(String theRuleName) {
 		super(theRuleName);
 	}

@@ -72,13 +73,13 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
 		return myAppliesToInstances;
 	}

-	public void setAppliesToInstances(Collection<IIdType> theAppliesToInstances) {
+	void setAppliesToInstances(Collection<IIdType> theAppliesToInstances) {
 		myAppliesToInstances = theAppliesToInstances;
 	}

 	@Override
 	public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource,
-									 IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags) {
+									 IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut) {

 		if (isOtherTenant(theRequestDetails)) {
 			return null;
@@ -207,6 +208,9 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
 				break;
 			case DELETE:
 				if (theOperation == RestOperationTypeEnum.DELETE) {
+					if (myAppliesToDeleteCascade != (thePointcut == Pointcut.STORAGE_CASCADE_DELETE)) {
+						return null;
+					}
 					if (theInputResource == null) {
 						return newVerdict();
 					}
@@ -264,7 +268,7 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
 					}
 				}

-				Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision(operation, theRequestDetails, inputResource, null, null);
+				Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision(operation, theRequestDetails, inputResource, null, null, thePointcut);
 				if (newVerdict == null) {
 					continue;
 				} else if (verdict == null) {
@@ -292,7 +296,7 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
 				if (nextResource == null) {
 					continue;
 				}
-				Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision(RestOperationTypeEnum.READ, theRequestDetails, null, null, nextResource);
+				Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision(RestOperationTypeEnum.READ, theRequestDetails, null, null, nextResource, thePointcut);
 				if (newVerdict == null) {
 					continue;
 				} else if (verdict == null) {
@@ -584,4 +588,8 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
 		return builder.toString();
 	}

+	public void setAppliesToDeleteCascade(boolean theAppliesToDeleteCascade) {
+		myAppliesToDeleteCascade = theAppliesToDeleteCascade;
+	}
+
 }
@@ -20,6 +20,7 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
  * #L%
  */

+import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -35,7 +36,7 @@ class RuleImplPatch extends BaseRule {
 	}

 	@Override
-	public AuthorizationInterceptor.Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags) {
+	public AuthorizationInterceptor.Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut) {
 		if (isOtherTenant(theRequestDetails)) {
 			return null;
 		}
@@ -415,7 +415,7 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi
 		RESOURCE
 	}

-	static boolean callOutgoingResponseHook(RequestDetails theRequest, ResponseDetails theResponseDetails) {
+	public static boolean callOutgoingResponseHook(RequestDetails theRequest, ResponseDetails theResponseDetails) {
 		HttpServletRequest servletRequest = null;
 		HttpServletResponse servletResponse = null;
 		if (theRequest instanceof ServletRequestDetails) {
@@ -438,4 +438,14 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi
 		return true;
 	}

+	public static void callOutgoingFailureOperationOutcomeHook(RequestDetails theRequestDetails, IBaseOperationOutcome theOperationOutcome) {
+		HookParams responseParams = new HookParams();
+		responseParams.add(RequestDetails.class, theRequestDetails);
+		responseParams.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
+		responseParams.add(IBaseOperationOutcome.class, theOperationOutcome);
+
+		if (theRequestDetails.getInterceptorBroadcaster() != null) {
+			theRequestDetails.getInterceptorBroadcaster().callHooks(Pointcut.SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME, responseParams);
+		}
+	}
 }
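Note: an interceptor that wants to react to this broadcast would declare a hook method on the SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME pointcut. A minimal sketch (illustrative only; the class and method names are hypothetical, and the parameter types are the ones passed by callOutgoingFailureOperationOutcomeHook() above) might look like this:

   // Hypothetical interceptor: inspects or augments the OperationOutcome that is
   // about to be returned for a failed request.
   public class FailureOutcomeLoggingInterceptor {
      @Hook(Pointcut.SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME)
      public void handleFailureOutcome(RequestDetails theRequestDetails, IBaseOperationOutcome theOutcome) {
         // e.g. log the failing request, or add details to theOutcome before it is streamed
      }
   }
   // Registration would follow the usual pattern, for example:
   // restfulServer.getInterceptorService().registerInterceptor(new FailureOutcomeLoggingInterceptor());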
@@ -285,8 +285,6 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding {
 			theMethodParams[myIdParamIndex] = theRequest.getId();
 		}

-		// populateActionRequestDetailsForInterceptor(theRequest); AAAAAA
-
 		Object response = invokeServerMethod(theServer, theRequest, theMethodParams);
 		IBundleProvider retVal = toResourceList(response);
 		return retVal;
@@ -88,12 +88,9 @@ public class InterceptorDstu3Test {
 	public void testServerPreHandledOnOperationCapturesResource() throws IOException {
 
 		AtomicReference<IBaseResource> resource = new AtomicReference<>();
-		IAnonymousInterceptor interceptor = new IAnonymousInterceptor() {
-			@Override
-			public void invoke(Pointcut thePointcut, HookParams theArgs) {
+		IAnonymousInterceptor interceptor = (thePointcut, theArgs) -> {
 			RequestDetails requestDetails = theArgs.get(RequestDetails.class);
 			resource.set(requestDetails.getResource());
-			}
 		};
 
 		ourServlet.getInterceptorService().registerAnonymousInterceptor(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED, interceptor);
@@ -2,7 +2,9 @@ package ca.uhn.fhir.rest.server.interceptor;
 
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.api.BundleInclusionRule;
+import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.rest.annotation.*;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.EncodingEnum;
@@ -12,6 +14,7 @@ import ca.uhn.fhir.rest.server.IResourceProvider;
 import ca.uhn.fhir.rest.server.RestfulServer;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.test.utilities.JettyUtil;
 import ca.uhn.fhir.util.TestUtil;
 import ca.uhn.fhir.util.UrlUtil;
 import com.google.common.base.Charsets;
@@ -25,6 +28,7 @@ import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.servlet.ServletHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
+import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.r4.model.*;
 import org.junit.AfterClass;
@@ -46,8 +50,6 @@ import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
-import ca.uhn.fhir.test.utilities.JettyUtil;
-
 public class ResponseHighlightingInterceptorTest {
 
 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseHighlightingInterceptorTest.class);
@@ -404,6 +406,34 @@ public class ResponseHighlightingInterceptorTest {
 		assertThat(output, containsString("<span class='hlTagName'>OperationOutcome</span>"));
 	}
 
+	@Test
+	public void testHighlightExceptionInvokesOutgoingFailureOperationOutcome() throws Exception {
+		IAnonymousInterceptor outgoingResponseInterceptor = (thePointcut, theArgs) -> {
+			OperationOutcome oo = (OperationOutcome) theArgs.get(IBaseOperationOutcome.class);
+			oo.addIssue().setDiagnostics("HELP IM A BUG");
+		};
+		ourServlet.getInterceptorService().registerAnonymousInterceptor(Pointcut.SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME, outgoingResponseInterceptor);
+		try {
+
+			HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Foobar/123");
+			httpGet.addHeader("Accept", "text/html");
+			CloseableHttpResponse status = ourClient.execute(httpGet);
+			String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
+			status.close();
+
+			ourLog.info("Resp: {}", responseContent);
+			assertEquals(404, status.getStatusLine().getStatusCode());
+			assertThat(responseContent, stringContainsInOrder("HELP IM A BUG"));
+
+		} finally {
+
+			ourServlet.getInterceptorService().unregisterInterceptor(outgoingResponseInterceptor);
+
+		}
+	}
+
+
+
 	/**
 	 * See #346
 	 */
@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.subscription.module.config;
+package ca.uhn.fhir.test.utilities;
 
 /*-
  * #%L
@@ -202,6 +202,14 @@
 					the HAPI FHIR library with all of its submodules automatically sharing the same
 					version. Thanks to Stig Døssing for the pull request!
 				</action>
+				<action type="add">
+					A new interceptor called CascadingDeleteInterceptor has been added to the
+					JPA project. This interceptor allows deletes to cascade when a specific
+					URL parameter or header is added to the request. Cascading deletes
+					can also be controlled by a new flag in the AuthorizationInterceptor
+					RuleBuilder, in order to ensure that cascading deletes are only available
+					to users with sufficient permission.
+				</action>
 			</release>
 			<release version="3.8.0" date="2019-05-30" description="Hippo">
 				<action type="fix">
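
For orientation only, a rough wiring sketch for the interceptor described in this change log entry is shown below. The constructor arguments and the surrounding variables are assumptions for illustration and may differ from the actual class added in this commit:

    // Hypothetical wiring sketch; daoRegistry, interceptorBroadcaster and restfulServer are assumed to exist.
    CascadingDeleteInterceptor cascadingDeleteInterceptor =
            new CascadingDeleteInterceptor(daoRegistry, interceptorBroadcaster);
    // Registering against the RESTful server lets delete requests opt in to cascading behaviour.
    restfulServer.registerInterceptor(cascadingDeleteInterceptor);

Whether a given user may actually trigger a cascade is then governed by the AuthorizationInterceptor rule mentioned in the entry above.
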
@@ -1,5 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<document xmlns="http://maven.apache.org/XDOC/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 http://maven.apache.org/xsd/xdoc-2.0.xsd">
+<document xmlns="http://maven.apache.org/XDOC/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 http://maven.apache.org/xsd/xdoc-2.0.xsd">
 
 	<properties>
 		<title>JPA Server</title>
@@ -20,7 +21,8 @@
 	<p>
 		HAPI also provides a persistence module which can be used to
 		provide a complete RESTful server implementation, backed by a database of
-		your choosing. This module uses the <a href="http://en.wikipedia.org/wiki/Java_Persistence_API">JPA 2.0</a>
+		your choosing. This module uses the
+		<a href="http://en.wikipedia.org/wiki/Java_Persistence_API">JPA 2.0</a>
 		API to store data in a database without depending on any specific database technology.
 	</p>
 	<p>
@@ -38,12 +40,17 @@
 		<p>
 			The easiest way to get started with HAPI's JPA server module is
 			to begin with the example project. There is a complete sample project
-			found in our GitHub repo here: <a href="https://github.com/jamesagnew/hapi-fhir/tree/master/hapi-fhir-jpaserver-example">hapi-fhir-jpaserver-example</a>
+			found in our GitHub repo here:
+			<a href="https://github.com/jamesagnew/hapi-fhir/tree/master/hapi-fhir-jpaserver-example">
+				hapi-fhir-jpaserver-example
+			</a>
 		</p>
 
 		<p>
-			This example is a fully contained FHIR server, supporting all standard operations (read/create/delete/etc).
-			It bundles an embedded instance of the <a href="http://db.apache.org/derby/">Apache Derby</a> Java database
+			This example is a fully contained FHIR server, supporting all standard operations
+			(read/create/delete/etc).
+			It bundles an embedded instance of the <a href="http://db.apache.org/derby/">Apache Derby</a> Java
+			database
 			so that the server can run without depending on any external database, but it can also be
 			configured to use an installation of Oracle, Postgres, etc.
 		</p>
@@ -61,20 +68,27 @@ $ mvn install]]></source>
 		</p>
 		<ul>
 			<li>
-				<b>Deploy to Tomcat/JBoss/Websphere/etc: </b> You will now have a file
+				<b>Deploy to Tomcat/JBoss/Websphere/etc:</b>
+				You will now have a file
 				in your <code>target</code> directory called <code>hapi-fhir-jpaserver-example.war</code>.
 				This WAR file can be deployed to any Servlet container, at which point you could
 				access the server by pointing your browser at a URL similar to the following
 				(you may need to adjust the
 				port depending on which port your container is configured to listen on):
-				<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">http://localhost:8080/hapi-fhir-jpaserver-example/</a>
+				<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">
+					http://localhost:8080/hapi-fhir-jpaserver-example/
+				</a>
 			</li>
 			<li>
-				<b>Run with Maven and Embedded Jetty: </b> To start the server
-				directly within Maven, you can execute the following command:<br/>
+				<b>Run with Maven and Embedded Jetty:</b>
+				To start the server
+				directly within Maven, you can execute the following command:
+				<br/>
 				<source>$ mvn jetty:run</source>
 				You can then access the server by pointing your browser at the following URL:
-				<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">http://localhost:8080/hapi-fhir-jpaserver-example/</a>
+				<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">
+					http://localhost:8080/hapi-fhir-jpaserver-example/
+				</a>
 			</li>
 		</ul>
 	</subsection>
@@ -88,7 +102,10 @@ $ mvn install]]></source>
 		</p>
 		<ul>
 			<li>
-				<a href="https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-jpaserver-example/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java"><b>FhirServerConfig.java</b></a>:
+				<a href="https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-jpaserver-example/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java">
+					<b>FhirServerConfig.java</b>
+				</a>
+				:
 				Configures the database connection settings
 			</li>
 		</ul>
@@ -112,7 +129,8 @@ public DaoConfig daoConfig() {
 		<p>
 			You can use this method to change various configuration settings on the DaoConfig bean
 			which define the way that the JPA server will behave.
-			See the <a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html">DaoConfig JavaDoc</a>
+			See the
+			<a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html">DaoConfig JavaDoc</a>
 			for information about the available settings.
 		</p>
 
@@ -190,12 +208,15 @@ public DaoConfig daoConfig() {
 			from your own resources. In this case, you are not neccesarily telling
 			the server that this is a real address that it should resolve, but
 			rather that this is an identifier for a ValueSet where
-			<code>ValueSet.url</code> has the given URI/URL.
+			<code>ValueSet.url</code>
+			has the given URI/URL.
 		</p>
 		<p>
 			HAPI can be configured to treat certain URI/URL patterns as
 			logical by using the DaoConfig#setTreatReferencesAsLogical property
-			(see <a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html#setTreatReferencesAsLogical-java.util.Set-">JavaDoc</a>).
+			(see <a
+					href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html#setTreatReferencesAsLogical-java.util.Set-">
+				JavaDoc</a>).
 			For example:
 		</p>
 		<div class="source">
@@ -225,7 +246,9 @@ public DaoConfig daoConfig() {
 			but in some cases it is not. If you want to disable caching, you have two
 			options:
 		</p>
-		<p><b>Globally Disable / Change Caching Timeout</b></p>
+		<p>
+			<b>Globally Disable / Change Caching Timeout</b>
+		</p>
 		<p>
 			You can change the global cache using the following setting:
 		</p>
@@ -234,7 +257,9 @@ public DaoConfig daoConfig() {
 			myDaoConfig.setReuseCachedSearchResultsForMillis(null);
 		</pre>
 		</div>
-		<p><b>Disable Cache at the Request Level</b></p>
+		<p>
+			<b>Disable Cache at the Request Level</b>
+		</p>
 		<p>
 			Clients can selectively disable caching for an individual request
 			using the Cache-Control header:
@@ -244,7 +269,9 @@ public DaoConfig daoConfig() {
 			Cache-Control: no-cache
 		</pre>
 		</div>
-		<p><b>Disable Paging at the Request Level</b></p>
+		<p>
+			<b>Disable Paging at the Request Level</b>
+		</p>
 		<p>
 			If the client knows that they will only want a small number of results
 			(for example, a UI containing 20 results is being shown and the client
@@ -282,39 +309,49 @@ public DaoConfig daoConfig() {
 				a
 				<a href="./apidocs/ca/uhn/fhir/rest/annotation/Search.html">@Search</a>
 				method implementing the complete set of search parameters defined in the FHIR
-				specification for the given resource type.<br/><br/>
+				specification for the given resource type.
+				<br/>
+				<br/>
 				The resource providers also extend a superclass which implements all of the
-				other FHIR methods, such as Read, Create, Delete, etc.<br/><br/>
+				other FHIR methods, such as Read, Create, Delete, etc.
+				<br/>
+				<br/>
 				Note that these resource providers are generated as a part of the HAPI build process,
 				so they are not checked into Git. You can see their source
 				in the <a href="./xref-jpaserver/">JXR Report</a>,
 				for example the
-				<a href="./xref-jpaserver/ca/uhn/fhir/jpa/rp/dstu2/PatientResourceProvider.html">PatientResourceProvider</a>.
-				<br/><br/>
+				<a href="./xref-jpaserver/ca/uhn/fhir/jpa/rp/dstu2/PatientResourceProvider.html">
+					PatientResourceProvider</a>.
+				<br/>
+				<br/>
 				The resource providers do not actually implement any of the logic
 				in searching, updating, etc. They simply receive the incoming HTTP calls (via the RestfulServer)
 				and pass along the incoming requests to the DAOs.
-				<br/><br/>
+				<br/>
+				<br/>
 			</li>
 			<li>
 				<b>HAPI DAOs:</b>
 				The DAOs actually implement all of the database business logic relating to
 				the storage, indexing, and retrieval of FHIR resources, using the underlying JPA
 				API.
-				<br/><br/>
+				<br/>
+				<br/>
 			</li>
 			<li>
 				<b>Hibernate:</b>
 				The HAPI JPA Server uses the JPA library, implemented by Hibernate. No Hibernate
 				specific features are used, so the library should also work with other
 				providers (e.g. Eclipselink) but it is not tested regularly with them.
-				<br/><br/>
+				<br/>
+				<br/>
 			</li>
 			<li>
 				<b>Database:</b>
 				The RESTful server uses an embedded Derby database, but can be configured to
 				talk to
-				<a href="https://developer.jboss.org/wiki/SupportedDatabases2?_sscc=t">any database supported by Hibernate</a>.
+				<a href="https://developer.jboss.org/wiki/SupportedDatabases2?_sscc=t">any database supported by
+					Hibernate</a>.
 			</li>
 
 		</ul>
@@ -378,19 +415,24 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
 
 		<p>
 			When upgrading the JPA server from one version of HAPI FHIR to a newer version,
-			often there will be changes to the database schema. The <b>Migrate Database</b>
+			often there will be changes to the database schema. The
+			<b>Migrate Database</b>
 			command can be used to perform a migration from one version to the next.
 		</p>
 
 		<p>
 			Note that this feature was added in HAPI FHIR 3.5.0. It is not able to migrate
-			from versions prior to HAPI FHIR 3.4.0. <b>Please make a backup of your
-			database before running this command!</b>
+			from versions prior to HAPI FHIR 3.4.0.
+			<b>Please make a backup of your
+				database before running this command!
+			</b>
 		</p>
 		<p>
 			The following example shows how to use the migrator utility to migrate between two versions.
 		</p>
-		<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_5_0</pre>
+		<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u
+			"jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_5_0
+		</pre>
 
 		<p>
 			You may use the following command to get detailed help on the options:
@@ -400,9 +442,18 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
 		<p>
 			Note the arguments:
 			<ul>
-				<li><code>-d [dialect]</code> - This indicates the database dialect to use. See the detailed help for a list of options</li>
-				<li><code>-f [version]</code> - The version to migrate from</li>
-				<li><code>-t [version]</code> - The version to migrate to</li>
+				<li>
+					<code>-d [dialect]</code>
+					- This indicates the database dialect to use. See the detailed help for a list of options
+				</li>
+				<li>
+					<code>-f [version]</code>
+					- The version to migrate from
+				</li>
+				<li>
+					<code>-t [version]</code>
+					- The version to migrate to
+				</li>
 			</ul>
 		</p>
 
@@ -412,7 +463,9 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
 			so they are not included in HAPI FHIR. In order to use this command with an Oracle database,
 			you will need to invoke the CLI as follows:
 		</p>
-		<pre>java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n "[username]" -p "[password]" -f V3_4_0 -t V3_5_0</pre>
+		<pre>java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n
+			"[username]" -p "[password]" -f V3_4_0 -t V3_5_0
+		</pre>
 	</subsection>
 
 	<subsection name="Migrating 3.4.0 to 3.5.0+">
@@ -446,19 +499,26 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
 			</li>
 			<li>
 				Modify your <code>DaoConfig</code> to specify that hash-based searches should not be used, using
-				the following setting:<br/>
+				the following setting:
+				<br/>
 				<pre>myDaoConfig.setDisableHashBasedSearches(true);</pre>
 			</li>
 			<li>
 				Make sure that you have your JPA settings configured to not automatically
 				create database indexes and columns using the following setting
-				in your JPA Properties:<br/>
+				in your JPA Properties:
+				<br/>
 				<pre>extraProperties.put("hibernate.hbm2ddl.auto", "none");</pre>
 			</li>
 			<li>
-				Run the database migrator command, including the entry <code>-x no-migrate-350-hashes</code>
-				on the command line. For example:<br/>
-				<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0 -x no-migrate-350-hashes</pre>
+				Run the database migrator command, including the entry
+				<code>-x no-migrate-350-hashes</code>
+				on the command line. For example:
+				<br/>
+				<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u
+					"jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0 -x
+					no-migrate-350-hashes
+				</pre>
 			</li>
 			<li>
 				Rebuild and start your HAPI FHIR JPA server. At this point you should have a working
@@ -468,7 +528,8 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
 			</li>
 			<li>
 				With the system running, request a complete reindex of the data in the database using
-				an HTTP request such as the following:<br/>
+				an HTTP request such as the following:
+				<br/>
 				<pre>GET /$mark-all-resources-for-reindexing</pre>
 				Note that this is a custom operation built into the HAPI FHIR JPA server. It should
 				be secured in a real deployment, so Authentication is likely required for this
@@ -488,12 +549,16 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
 			<li>
 				Modify your <code>DaoConfig</code> to specify that hash-based searches are used, using
 				the following setting (this is the default setting, so it could also simply
-				be omitted):<br/>
+				be omitted):
+				<br/>
 				<pre>myDaoConfig.setDisableHashBasedSearches(false);</pre>
 			</li>
 			<li>
-				Execute the migrator tool again, this time omitting the flag option, e.g.<br/>
-				<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0</pre>
+				Execute the migrator tool again, this time omitting the flag option, e.g.
+				<br/>
+				<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u
+					"jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0
+				</pre>
 			</li>
 			<li>
 				Rebuild, and start HAPI FHIR JPA again.
@@ -503,6 +568,23 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
 
 	</section>
 
+	<section type="Cascading Deletes">
+		<p>
+			An interceptor called
+			<code>CascadingDeleteInterceptor</code>
+			may be registered against the Server. When this interceptor is enabled,
+			cascading deletes may be performed using either of the following:
+		</p>
+		<ul>
+			<li>The request may include the following parameter:
+				<code>_cascade=true</code>
+			</li>
+			<li>The request may include the following header:
+				<code>X-Cascade-Delete: true</code>
+			</li>
+		</ul>
+	</section>
+
 </body>
 
 </document>
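
As a concrete illustration of the request forms described in the new Cascading Deletes section above (not part of the diff itself): the base URL, resource type and ID are placeholders, the HTTP client setup is assumed, and the snippet uses the Apache HttpClient API already used by the tests in this commit:

    // Delete a Patient and ask the server to cascade to resources that reference it, via the URL parameter
    HttpDelete delete = new HttpDelete("http://localhost:8080/fhir/Patient/123?_cascade=true");
    // Alternatively, the header form described above could be used instead of the parameter:
    // delete.addHeader("X-Cascade-Delete", "true");
    try (CloseableHttpResponse response = httpClient.execute(delete)) {
        System.out.println("Cascading delete returned HTTP " + response.getStatusLine().getStatusCode());
    }
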