Enable optional cascading deletes on JPA server (#1363)

* Start working on cascading deletes

* Work on cascading deletes

* Work on cascading deletes

* Enable cascading deletes

* Cascade deletes almost working

* Fix test

* Test fix
This commit is contained in:
James Agnew 2019-06-28 17:32:39 -04:00 committed by GitHub
parent fa80223d57
commit 1fd99dac63
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
41 changed files with 1044 additions and 247 deletions

View File

@ -241,7 +241,7 @@ public enum Pointcut {
* ca.uhn.fhir.rest.api.RestOperationTypeEnum - The type of operation that the FHIR server has determined that the client is trying to invoke
* </li>
* <li>
* ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails - This parameter is provided for legacy reasons only and will be removed in the fututre. Do not use.
* ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails - This parameter is provided for legacy reasons only and will be removed in the future. Do not use.
* </li>
* </ul>
* </p>
@ -373,6 +373,43 @@ public enum Pointcut {
"javax.servlet.http.HttpServletResponse"
),
/**
* <b>Server Hook:</b>
* This method is called when an OperationOutcome is being returned in response to a failure.
* Hook methods may use this hook to modify the OperationOutcome being returned.
* <p>
* Hooks may accept the following parameters:
* <ul>
* <li>
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. Note that the bean
* properties are not all guaranteed to be populated, depending on how early during processing the
* exception occurred.
* </li>
* <li>
* ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
* only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
* </li>
* <li>
* org.hl7.fhir.instance.model.api.IBaseOperationOutcome - The OperationOutcome resource that will be
* returned.
* </li>
* </ul>
* <p>
* Hook methods must return <code>void</code>
* </p>
*/
SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME(
void.class,
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
"org.hl7.fhir.instance.model.api.IBaseOperationOutcome"
),
/**
* <b>Server Hook:</b>
* This method is called after all processing is completed for a request, but only if the
@ -453,7 +490,6 @@ public enum Pointcut {
*/
SUBSCRIPTION_RESOURCE_MATCHED(boolean.class, "ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage", "ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult"),
/**
* Invoked whenever a persisted resource was checked against all active subscriptions, and did not
* match any.
@ -506,6 +542,7 @@ public enum Pointcut {
*/
SUBSCRIPTION_AFTER_DELIVERY(void.class, "ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage"),
/**
* Invoked immediately after the attempted delivery of a subscription, if the delivery
* failed.
@ -565,7 +602,6 @@ public enum Pointcut {
*/
SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY(boolean.class, "ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage"),
/**
* Invoked whenever a persisted resource (a resource that has just been stored in the
* database via a create/update/patch/etc.) is about to be checked for whether any subscriptions
@ -584,6 +620,7 @@ public enum Pointcut {
*/
SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED(boolean.class, "ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage"),
/**
* Invoked whenever a persisted resource (a resource that has just been stored in the
* database via a create/update/patch/etc.) has been checked for whether any subscriptions
@ -600,6 +637,7 @@ public enum Pointcut {
*/
SUBSCRIPTION_AFTER_PERSISTED_RESOURCE_CHECKED(void.class, "ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage"),
/**
* Invoked immediately after an active subscription is "registered". In HAPI FHIR, when
* a subscription
@ -618,6 +656,48 @@ public enum Pointcut {
*/
SUBSCRIPTION_AFTER_ACTIVE_SUBSCRIPTION_REGISTERED(void.class, "ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription"),
/**
* Invoked when one or more resources are about to be deleted as the result of a cascading delete.
* <p>
* Hooks may accept the following parameters:
* </p>
* <ul>
* <li>
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. Note that the bean
* properties are not all guaranteed to be populated, depending on how early during processing the
* exception occurred. <b>Note that this parameter may be null in contexts where the request is not
* known, such as while processing searches</b>
* </li>
* <li>
* ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
* only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
* </li>
* <li>
* ca.uhn.fhir.jpa.delete.DeleteConflictList - Contains the details about the delete conflicts that are
* being resolved via deletion. The source resource is the resource that will be deleted, and
* is a cascade because the target resource is already being deleted.
* </li>
* <li>
* org.hl7.fhir.instance.model.api.IBaseResource - The actual resource that is about to be deleted via a cascading delete
* </li>
* </ul>
* <p>
* Hooks should return <code>void</code>.
* </p>
*/
STORAGE_CASCADE_DELETE(
void.class,
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
"ca.uhn.fhir.jpa.delete.DeleteConflictList",
"org.hl7.fhir.instance.model.api.IBaseResource"
),
/**
* Invoked when one or more resources may be returned to the user, whether as a part of a READ,
* a SEARCH, or even as the response to a CREATE/UPDATE, etc.
@ -664,7 +744,6 @@ public enum Pointcut {
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
/**
* Invoked when the storage engine is about to check for the existence of a pre-cached search
* whose results match the given search parameters.
@ -702,7 +781,6 @@ public enum Pointcut {
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
/**
* Invoked when a search is starting, prior to creating a record for the search.
* <p>
@ -738,7 +816,6 @@ public enum Pointcut {
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
/**
* Invoked when one or more resources may be returned to the user, whether as a part of a READ,
* a SEARCH, or even as the response to a CREATE/UPDATE, etc.
@ -859,6 +936,7 @@ public enum Pointcut {
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
/**
* Invoked before a resource will be created, immediately before the resource
* is persisted to the database.
@ -893,6 +971,8 @@ public enum Pointcut {
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
/**
* Invoked before a resource will be created, immediately before the transaction
* is committed (after all validation and other business rules have successfully
@ -968,6 +1048,8 @@ public enum Pointcut {
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
/**
* Invoked before a resource will be created
* <p>
@ -1001,7 +1083,6 @@ public enum Pointcut {
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
/**
* Invoked when a resource delete operation is about to fail due to referential integrity conflicts.
* <p>
@ -1025,15 +1106,15 @@ public enum Pointcut {
* </li>
* </ul>
* <p>
* Hooks should return <code>boolean</code>. If the method returns <code>true</code> then the caller
* will retry checking for delete conflicts. If there are still conflicts, then the hook will be invoked again,
* repeatedly up to a maximum of {@value ca.uhn.fhir.jpa.delete.DeleteConflictService#MAX_RETRIES} retries.
* The first time the hook is invoked, there will be a maximum of {@value ca.uhn.fhir.jpa.delete.DeleteConflictService#MIN_QUERY_RESULT_COUNT}
* conflicts passed to the method. Subsequent hook invocations will pass a maximum of
* {@value ca.uhn.fhir.jpa.delete.DeleteConflictService#MAX_RETRY_COUNT} conflicts to the hook.
* Hooks should return <code>ca.uhn.fhir.jpa.delete.DeleteConflictOutcome</code>.
* If the interceptor returns a non-null result, the DeleteConflictOutcome can be
* used to indicate a number of times to retry.
* </p>
*/
STORAGE_PRESTORAGE_DELETE_CONFLICTS(boolean.class,
STORAGE_PRESTORAGE_DELETE_CONFLICTS(
// Return type
"ca.uhn.fhir.jpa.delete.DeleteConflictOutcome",
// Params
"ca.uhn.fhir.jpa.delete.DeleteConflictList",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
@ -1076,7 +1157,6 @@ public enum Pointcut {
"ca.uhn.fhir.jpa.model.search.StorageProcessingMessage"
),
/**
* Note that this is a performance tracing hook. Use with caution in production
* systems, since calling it may (or may not) carry a cost.
@ -1192,6 +1272,7 @@ public enum Pointcut {
"ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails"
),
/**
* Note that this is a performance tracing hook. Use with caution in production
* systems, since calling it may (or may not) carry a cost.
@ -1363,8 +1444,8 @@ public enum Pointcut {
private final Class<?> myReturnType;
private final ExceptionHandlingSpec myExceptionHandlingSpec;
Pointcut(@Nonnull Class<?> theReturnType, String... theParameterTypes) {
this(theReturnType, new ExceptionHandlingSpec(), theParameterTypes);
Pointcut(@Nonnull String theReturnType, String... theParameterTypes) {
this(toReturnTypeClass(theReturnType), new ExceptionHandlingSpec(), theParameterTypes);
}
Pointcut(@Nonnull Class<?> theReturnType, @Nonnull ExceptionHandlingSpec theExceptionHandlingSpec, String... theParameterTypes) {
@ -1373,6 +1454,10 @@ public enum Pointcut {
myParameterTypes = Collections.unmodifiableList(Arrays.asList(theParameterTypes));
}
Pointcut(@Nonnull Class<?> theReturnType, String... theParameterTypes) {
this(theReturnType, new ExceptionHandlingSpec(), theParameterTypes);
}
public boolean isShouldLogAndSwallowException(@Nonnull Throwable theException) {
for (Class<? extends Throwable> next : myExceptionHandlingSpec.myTypesToLogAndSwallow) {
if (next.isAssignableFrom(theException.getClass())) {
@ -1392,6 +1477,9 @@ public enum Pointcut {
return myParameterTypes;
}
private class UnknownType {
}
private static class ExceptionHandlingSpec {
private final Set<Class<? extends Throwable>> myTypesToLogAndSwallow = new HashSet<>();
@ -1403,4 +1491,12 @@ public enum Pointcut {
}
private static Class<?> toReturnTypeClass(String theReturnType) {
try {
return Class.forName(theReturnType);
} catch (ClassNotFoundException theE) {
return UnknownType.class;
}
}
}

View File

@ -268,14 +268,15 @@ public class InterceptorService implements IInterceptorService, IInterceptorBroa
*/
for (BaseInvoker nextInvoker : invokers) {
Object nextOutcome = nextInvoker.invoke(theParams);
if (thePointcut.getReturnType() == boolean.class) {
Class<?> pointcutReturnType = thePointcut.getReturnType();
if (pointcutReturnType.equals(boolean.class)) {
Boolean nextOutcomeAsBoolean = (Boolean) nextOutcome;
if (Boolean.FALSE.equals(nextOutcomeAsBoolean)) {
ourLog.trace("callHooks({}) for invoker({}) returned false", thePointcut, nextInvoker);
theRetVal = false;
break;
}
} else if (thePointcut.getReturnType() != void.class) {
} else if (pointcutReturnType.equals(void.class) == false) {
if (nextOutcome != null) {
theRetVal = nextOutcome;
break;
@ -481,9 +482,13 @@ public class InterceptorService implements IInterceptorService, IInterceptorBroa
Object[] args = new Object[myParameterTypes.length];
for (int i = 0; i < myParameterTypes.length; i++) {
Class<?> nextParamType = myParameterTypes[i];
int nextParamIndex = myParameterIndexes[i];
Object nextParamValue = theParams.get(nextParamType, nextParamIndex);
args[i] = nextParamValue;
if (nextParamType.equals(Pointcut.class)) {
args[i] = myPointcut;
} else {
int nextParamIndex = myParameterIndexes[i];
Object nextParamValue = theParams.get(nextParamType, nextParamIndex);
args[i] = nextParamValue;
}
}
// Invoke the method

View File

@ -207,6 +207,8 @@ public class Constants {
public static final String TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS = "TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS";
public static final String PARAM_SEARCH_TOTAL_MODE = "_total";
public static final String CAPABILITYSTATEMENT_WEBSOCKET_URL = "http://hl7.org/fhir/StructureDefinition/capabilitystatement-websocket";
public static final String PARAMETER_CASCADE_DELETE = "_cascade";
public static final String HEADER_CASCADE_DELETE = "X-Cascade-Delete";
static {
CHARSET_UTF8 = Charset.forName(CHARSET_NAME_UTF8);

View File

@ -104,17 +104,19 @@ ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor.failedToExtractPa
ca.uhn.fhir.jpa.dao.SearchBuilder.invalidQuantityPrefix=Unable to handle quantity prefix "{0}" for value: {1}
ca.uhn.fhir.jpa.dao.SearchBuilder.invalidNumberPrefix=Unable to handle number prefix "{0}" for value: {1}
ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.matchesFound=Matches found!
ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.noMatchesFound=No matches found!
ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.matchesFound=Matches found!
ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.noMatchesFound=No matches found!
ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoSearchParameterR4.invalidSearchParamExpression=The expression "{0}" can not be evaluated and may be invalid: {1}
ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.successMsg=Cascaded delete to {0} resources: {1}
ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.noParam=Note that cascading deletes are not active for this request. You can enable cascading deletes by using the "_cascade=true" URL parameter.
ca.uhn.fhir.jpa.provider.BaseJpaProvider.cantCombintAtAndSince=Unable to combine _at and _since parameters for history operation
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateConceptMapUrl=Can not create multiple ConceptMap resources with ConceptMap.url "{0}", already have one with resource ID: {1}
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.cannotCreateDuplicateCodeSystemUri=Can not create multiple code systems with URI "{0}", already have one with resource ID: {1}
ca.uhn.fhir.jpa.term.BaseHapiTerminologySvcImpl.expansionTooLarge=Expansion of ValueSet produced too many codes (maximum {0}) - Operation aborted!
ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.matchesFound=Matches found!
ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoConceptMapDstu3.noMatchesFound=No matches found!
ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.matchesFound=Matches found!
ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.noMatchesFound=No matches found!
ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON patch to {0}: {1}

View File

@ -47,9 +47,9 @@ import javax.annotation.Nonnull;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -192,6 +192,11 @@ public abstract class BaseConfig implements SchedulingConfigurer {
return daoRegistry().getResourceDaoIfExists(theResourceType) != null;
}
@Bean
public IConsentContextServices consentContextServices() {
return new JpaConsentContextServices();
}
public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theCtx) {
theFactory.setJpaDialect(hibernateJpaDialect(theCtx.getLocalizer()));
theFactory.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");
@ -202,9 +207,5 @@ public abstract class BaseConfig implements SchedulingConfigurer {
return new HapiFhirHibernateJpaDialect(theLocalizer);
}
@Bean
public IConsentContextServices consentContextServices() {
return new JpaConsentContextServices();
}
}

View File

@ -968,6 +968,7 @@ public class DaoConfig {
* and other FHIR features may not behave as expected when referential integrity is not
* preserved. Use this feature with caution.
* </p>
* @see ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor
*/
public boolean isEnforceReferentialIntegrityOnDelete() {
return myEnforceReferentialIntegrityOnDelete;
@ -981,6 +982,7 @@ public class DaoConfig {
* and other FHIR features may not behave as expected when referential integrity is not
* preserved. Use this feature with caution.
* </p>
* @see ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor
*/
public void setEnforceReferentialIntegrityOnDelete(boolean theEnforceReferentialIntegrityOnDelete) {
myEnforceReferentialIntegrityOnDelete = theEnforceReferentialIntegrityOnDelete;

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.delete;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -53,4 +53,9 @@ public class DeleteConflictList {
public int size() {
return myList.size();
}
@Override
public String toString() {
return myList.toString();
}
}

View File

@ -0,0 +1,19 @@
package ca.uhn.fhir.jpa.delete;

import org.apache.commons.lang3.Validate;

/**
 * Return type for interceptor hooks that resolve delete conflicts. Carries the
 * number of times the caller should re-check for remaining conflicts after the
 * hook has (potentially) resolved some of them.
 */
public class DeleteConflictOutcome {

	// Number of additional conflict-check passes requested; never negative.
	private int myRetryCount;

	/**
	 * Sets the number of times the delete-conflict check should be retried.
	 *
	 * @param theShouldRetryCount the retry count; must be zero or greater
	 * @return this instance, to allow fluent chaining
	 * @throws IllegalArgumentException if the supplied count is negative
	 */
	public DeleteConflictOutcome setShouldRetryCount(int theShouldRetryCount) {
		Validate.isTrue(theShouldRetryCount >= 0, "theShouldRetryCount must not be negative");
		this.myRetryCount = theShouldRetryCount;
		return this;
	}

	/**
	 * @return the number of retry passes requested (never negative)
	 */
	public int getShouldRetryCount() {
		return myRetryCount;
	}
}

View File

@ -69,31 +69,33 @@ public class DeleteConflictService {
// In most cases, there will be no hooks, and so we only need to check if there is at least FIRST_QUERY_RESULT_COUNT conflict and populate that.
// Only in the case where there is a hook do we need to go back and collect larger batches of conflicts for processing.
boolean tryAgain = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, FIRST_QUERY_RESULT_COUNT);
DeleteConflictOutcome outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, FIRST_QUERY_RESULT_COUNT);
int retryCount = 0;
while (tryAgain && retryCount < MAX_RETRY_ATTEMPTS) {
while (outcome != null) {
int shouldRetryCount = Math.min(outcome.getShouldRetryCount(), MAX_RETRY_ATTEMPTS);
if (!(retryCount < shouldRetryCount)) break;
newConflicts = new DeleteConflictList();
tryAgain = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, RETRY_QUERY_RESULT_COUNT);
outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, RETRY_QUERY_RESULT_COUNT);
++retryCount;
}
theDeleteConflicts.addAll(newConflicts);
return retryCount;
}
private boolean findAndHandleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, int theMinQueryResultCount) {
private DeleteConflictOutcome findAndHandleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, int theMinQueryResultCount) {
List<ResourceLink> resultList = myDeleteConflictFinderService.findConflicts(theEntity, theMinQueryResultCount);
if (resultList.isEmpty()) {
return false;
return null;
}
return handleConflicts(theRequest, theDeleteConflicts, theEntity, theForValidate, resultList);
}
private boolean handleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, List<ResourceLink> theResultList) {
private DeleteConflictOutcome handleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, List<ResourceLink> theResultList) {
if (!myDaoConfig.isEnforceReferentialIntegrityOnDelete() && !theForValidate) {
ourLog.debug("Deleting {} resource dependencies which can no longer be satisfied", theResultList.size());
myResourceLinkDao.deleteAll(theResultList);
return false;
return null;
}
addConflictsToList(theDeleteConflicts, theEntity, theResultList);
@ -103,7 +105,7 @@ public class DeleteConflictService {
.add(DeleteConflictList.class, theDeleteConflicts)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
return JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, hooks);
return (DeleteConflictOutcome)JpaInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, hooks);
}
private void addConflictsToList(DeleteConflictList theDeleteConflicts, ResourceTable theEntity, List<ResourceLink> theResultList) {

View File

package ca.uhn.fhir.jpa.interceptor;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.*;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.delete.DeleteConflictList;
import ca.uhn.fhir.jpa.delete.DeleteConflictOutcome;
import ca.uhn.fhir.jpa.util.DeleteConflict;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.ResponseDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import static ca.uhn.fhir.jpa.delete.DeleteConflictService.MAX_RETRY_ATTEMPTS;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
 * Interceptor that allows for cascading deletes (deletes that resolve constraint issues).
 * <p>
 * For example, if <code>DiagnosticReport/A</code> has a reference to <code>Observation/B</code>
 * it is not normally possible to delete <code>Observation/B</code> without first deleting
 * <code>DiagnosticReport/A</code>. With this interceptor in place, it is.
 * </p>
 * <p>
 * When using this interceptor, client requests must include the parameter
 * <code>_cascade=true</code> on the DELETE URL in order to activate
 * cascading delete, or include the request header <code>X-Cascade-Delete: true</code>
 * </p>
 */
@Interceptor
public class CascadingDeleteInterceptor {

	private static final Logger ourLog = LoggerFactory.getLogger(CascadingDeleteInterceptor.class);

	// RequestDetails user-data keys used to carry state between the delete-conflict
	// hook and the outgoing-response hooks on the same request
	private static final String CASCADED_DELETES_KEY = CascadingDeleteInterceptor.class.getName() + "_CASCADED_DELETES_KEY";
	private static final String CASCADED_DELETES_FAILED_KEY = CascadingDeleteInterceptor.class.getName() + "_CASCADED_DELETES_FAILED_KEY";

	private final DaoRegistry myDaoRegistry;
	private final IInterceptorBroadcaster myInterceptorBroadcaster;

	/**
	 * Constructor
	 *
	 * @param theDaoRegistry            The DAO registry (must not be null)
	 * @param theInterceptorBroadcaster The interceptor broadcaster used to fire
	 *                                  {@link Pointcut#STORAGE_CASCADE_DELETE} (must not be null)
	 */
	public CascadingDeleteInterceptor(DaoRegistry theDaoRegistry, IInterceptorBroadcaster theInterceptorBroadcaster) {
		Validate.notNull(theDaoRegistry, "theDaoRegistry must not be null");
		Validate.notNull(theInterceptorBroadcaster, "theInterceptorBroadcaster must not be null");
		myDaoRegistry = theDaoRegistry;
		myInterceptorBroadcaster = theInterceptorBroadcaster;
	}

	/**
	 * Resolves delete conflicts by deleting each conflicting source resource,
	 * but only if the request opted in to cascading deletes (see {@link #shouldCascade(RequestDetails)}).
	 *
	 * @param theConflictList the conflicts detected for the current delete
	 * @param theRequest      the active request (may be null in non-REST contexts)
	 * @return a retry instruction when conflicts were cascaded, or <code>null</code>
	 * to fall through to the default (fail the delete) behaviour
	 */
	@Hook(Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS)
	public DeleteConflictOutcome handleDeleteConflicts(DeleteConflictList theConflictList, RequestDetails theRequest) {
		ourLog.debug("Have delete conflicts: {}", theConflictList);

		if (!shouldCascade(theRequest)) {
			return null;
		}

		List<String> cascadedDeletes = getCascadedDeletesMap(theRequest, true);
		for (Iterator<DeleteConflict> iter = theConflictList.iterator(); iter.hasNext(); ) {
			DeleteConflict next = iter.next();
			IdDt nextSource = next.getSourceId();
			IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextSource.getResourceType());

			// Interceptor call: STORAGE_CASCADE_DELETE — gives hooks a chance to veto
			// or observe each individual cascaded delete before it happens
			IBaseResource resource = dao.read(nextSource);
			HookParams params = new HookParams()
				.add(RequestDetails.class, theRequest)
				.addIfMatchesType(ServletRequestDetails.class, theRequest)
				.add(DeleteConflictList.class, theConflictList)
				.add(IBaseResource.class, resource);
			JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_CASCADE_DELETE, params);

			// Actually perform the delete
			ourLog.info("Have delete conflict {} - Cascading delete", next);
			dao.delete(nextSource, theRequest);

			cascadedDeletes.add(nextSource.getValue());
		}

		// Ask DeleteConflictService to re-check for conflicts now that some have been resolved
		return new DeleteConflictOutcome().setShouldRetryCount(MAX_RETRY_ATTEMPTS);
	}

	/**
	 * Returns the per-request list of cascaded-delete resource IDs.
	 * <p>
	 * FIX: previously the <code>theCreate</code> parameter was ignored and a list was
	 * always created, which caused {@link #outgoingResponse} to append a
	 * "Cascaded delete to 0 resources" issue to every matching response even when no
	 * cascade occurred. Now a list is only created when <code>theCreate</code> is true;
	 * otherwise <code>null</code> is returned when nothing was cascaded.
	 * </p>
	 *
	 * @param theRequest the active request holding the user-data map
	 * @param theCreate  whether to create (and store) the list if it does not yet exist
	 * @return the list, or <code>null</code> if absent and <code>theCreate</code> is false
	 */
	@SuppressWarnings("unchecked")
	private List<String> getCascadedDeletesMap(RequestDetails theRequest, boolean theCreate) {
		List<String> retVal = (List<String>) theRequest.getUserData().get(CASCADED_DELETES_KEY);
		if (retVal == null && theCreate) {
			retVal = new ArrayList<>();
			theRequest.getUserData().put(CASCADED_DELETES_KEY, retVal);
		}
		return retVal;
	}

	/**
	 * If the request failed and we previously recorded a "cascade not active" message
	 * for it, append that message to the outgoing OperationOutcome as an informational issue.
	 */
	@Hook(Pointcut.SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME)
	public void outgoingFailureOperationOutcome(RequestDetails theRequestDetails, IBaseOperationOutcome theResponse) {
		if (theRequestDetails != null) {

			String failedDeleteMessage = (String) theRequestDetails.getUserData().get(CASCADED_DELETES_FAILED_KEY);
			if (isNotBlank(failedDeleteMessage)) {
				FhirContext ctx = theRequestDetails.getFhirContext();
				String severity = OperationOutcome.IssueSeverity.INFORMATION.toCode();
				String code = OperationOutcome.IssueType.INFORMATIONAL.toCode();
				String details = failedDeleteMessage;
				OperationOutcomeUtil.addIssue(ctx, theResponse, severity, details, null, code);
			}

		}
	}

	/**
	 * On a successful (HTTP 200) OperationOutcome response, append an informational
	 * issue listing the resources that were cascade-deleted during this request.
	 * No issue is added when nothing was cascaded.
	 */
	@Hook(Pointcut.SERVER_OUTGOING_RESPONSE)
	public void outgoingResponse(RequestDetails theRequestDetails, ResponseDetails theResponseDetails, IBaseResource theResponse) {
		if (theRequestDetails != null) {

			// Successful delete list
			List<String> deleteList = getCascadedDeletesMap(theRequestDetails, false);
			if (deleteList != null && !deleteList.isEmpty()) {
				if (theResponseDetails.getResponseCode() == 200) {
					if (theResponse instanceof IBaseOperationOutcome) {
						FhirContext ctx = theRequestDetails.getFhirContext();
						IBaseOperationOutcome oo = (IBaseOperationOutcome) theResponse;
						String severity = OperationOutcome.IssueSeverity.INFORMATION.toCode();
						String code = OperationOutcome.IssueType.INFORMATIONAL.toCode();
						String details = ctx.getLocalizer().getMessage(CascadingDeleteInterceptor.class, "successMsg", deleteList.size(), deleteList);
						OperationOutcomeUtil.addIssue(ctx, oo, severity, details, null, code);
					}
				}
			}

		}
	}

	/**
	 * Subclasses may override
	 *
	 * @param theRequest The REST request
	 * @return Returns true if cascading delete should be allowed
	 */
	@SuppressWarnings("WeakerAccess")
	protected boolean shouldCascade(RequestDetails theRequest) {
		if (theRequest != null) {
			String[] cascadeParameters = theRequest.getParameters().get(Constants.PARAMETER_CASCADE_DELETE);
			if (cascadeParameters != null && Arrays.asList(cascadeParameters).contains("true")) {
				return true;
			}

			String cascadeHeader = theRequest.getHeader(Constants.HEADER_CASCADE_DELETE);
			if ("true".equals(cascadeHeader)) {
				return true;
			}

			// Add a message to the response explaining how to enable cascading deletes
			String message = theRequest.getFhirContext().getLocalizer().getMessage(CascadingDeleteInterceptor.class, "noParam");
			theRequest.getUserData().put(CASCADED_DELETES_FAILED_KEY, message);
		}

		return false;
	}

}

View File

@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.util;
*/
import ca.uhn.fhir.model.primitive.IdDt;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
public class DeleteConflict {
@ -46,4 +48,13 @@ public class DeleteConflict {
return myTargetId;
}
@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("sourceId", mySourceId)
.append("sourcePath", mySourcePath)
.append("targetId", myTargetId)
.toString();
}
}

View File

@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil;
import ca.uhn.fhir.jpa.subscription.module.config.UnregisterScheduledProcessor;
import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
import ca.uhn.fhir.jpa.subscription.module.subscriber.SubscriptionDeliveringRestHookSubscriber;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

View File

@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.config.TestR4Config;
import ca.uhn.fhir.jpa.dao.*;
import ca.uhn.fhir.jpa.dao.data.*;
import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
@ -160,7 +161,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Autowired
@Qualifier("myRiskAssessmentDaoR4")
protected IFhirResourceDao<RiskAssessment> myRiskAssessmentDao;
protected IServerInterceptor myInterceptor;
@Autowired
protected IInterceptorService myInterceptorRegistry;
@Autowired
@ -298,6 +298,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
private JpaValidationSupportChainR4 myJpaValidationSupportChainR4;
private PerformanceTracingLoggingInterceptor myPerformanceTracingLoggingInterceptor;
private List<Object> mySystemInterceptors;
protected IServerInterceptor myInterceptor;
@After()
public void afterCleanupDao() {

View File

@ -9,7 +9,7 @@ import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam;
import ca.uhn.fhir.jpa.searchparam.SearchParamConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.subscription.module.config.UnregisterScheduledProcessor;
import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.ReferenceParam;

View File

@ -40,7 +40,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
}
@Test
public void testDeleteFailCallsHook() throws Exception {
public void testDeleteFailCallsHook() {
Organization organization = new Organization();
organization.setName("FOO");
IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();
@ -49,7 +49,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
patient.setManagingOrganization(new Reference(organizationId));
IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
myDeleteInterceptor.deleteConflictFunction = list -> false;
myDeleteInterceptor.deleteConflictFunction = t -> new DeleteConflictOutcome().setShouldRetryCount(0);
try {
myOrganizationDao.delete(organizationId);
fail();
@ -64,7 +64,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
}
@Test
public void testDeleteHookDeletesConflict() throws Exception {
public void testDeleteHookDeletesConflict() {
Organization organization = new Organization();
organization.setName("FOO");
IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();
@ -82,7 +82,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
}
@Test
public void testDeleteHookDeletesTwoConflicts() throws Exception {
public void testDeleteHookDeletesTwoConflicts() {
Organization organization = new Organization();
organization.setName("FOO");
IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();
@ -104,7 +104,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
}
@Test
public void testDeleteHookDeletesThreeConflicts() throws Exception {
public void testDeleteHookDeletesThreeConflicts() {
Organization organization = new Organization();
organization.setName("FOO");
IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();
@ -130,7 +130,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
}
@Test
public void testBadInterceptorNoInfiniteLoop() throws Exception {
public void testBadInterceptorNoInfiniteLoop() {
Organization organization = new Organization();
organization.setName("FOO");
IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();
@ -140,7 +140,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
// Always returning true is bad behaviour. Our infinite loop checker should halt it
myDeleteInterceptor.deleteConflictFunction = list -> true;
myDeleteInterceptor.deleteConflictFunction = t -> new DeleteConflictOutcome().setShouldRetryCount(Integer.MAX_VALUE);
try {
myOrganizationDao.delete(organizationId);
@ -151,7 +151,7 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
assertEquals(1 + DeleteConflictService.MAX_RETRY_ATTEMPTS, myDeleteInterceptor.myCallCount);
}
private boolean deleteConflicts(DeleteConflictList theList) {
private DeleteConflictOutcome deleteConflicts(DeleteConflictList theList) {
Iterator<DeleteConflict> iterator = theList.iterator();
while (iterator.hasNext()) {
DeleteConflict next = iterator.next();
@ -162,16 +162,16 @@ public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
++myInterceptorDeleteCount;
}
}
return myInterceptorDeleteCount > 0;
return new DeleteConflictOutcome().setShouldRetryCount(myInterceptorDeleteCount);
}
private static class DeleteConflictInterceptor {
int myCallCount;
DeleteConflictList myDeleteConflictList;
Function<DeleteConflictList, Boolean> deleteConflictFunction;
Function<DeleteConflictList, DeleteConflictOutcome> deleteConflictFunction;
@Hook(Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS)
public boolean deleteConflicts(DeleteConflictList theDeleteConflictList) {
public DeleteConflictOutcome deleteConflicts(DeleteConflictList theDeleteConflictList) {
++myCallCount;
myDeleteConflictList = theDeleteConflictList;
return deleteConflictFunction.apply(theDeleteConflictList);

View File

@ -8,8 +8,6 @@ import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
@ -27,15 +25,20 @@ import static org.mockito.Mockito.when;
@RunWith(SpringRunner.class)
@ContextConfiguration(classes = {DeleteConflictServiceTest.SpringConfig.class})
public class DeleteConflictServiceTest {
private static final Logger ourLog = LoggerFactory.getLogger(DeleteConflictServiceTest.class);
@MockBean
private DeleteConflictFinderService myDeleteConflictFinderService;
/**
* This is needed, don't remove
*/
@SuppressWarnings("unused")
@MockBean
private IResourceLinkDao myResourceLinkDao;
@SuppressWarnings("unused")
@MockBean
private FhirContext myFhirContext;
@MockBean
@SuppressWarnings("unused")
private IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
@ -424,6 +425,134 @@ public class AuthorizationInterceptorResourceProviderR4Test extends BaseResource
assertEquals(id.getValue(), patient.getId());
}
@Test
public void testDeleteCascadeBlocked() {
CascadingDeleteInterceptor cascadingDeleteInterceptor = new CascadingDeleteInterceptor(myDaoRegistry, myInterceptorRegistry);
ourRestServer.getInterceptorService().registerInterceptor(cascadingDeleteInterceptor);
try {
// Create Patient, and Observation that refers to it
Patient patient = new Patient();
patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100");
patient.addName().setFamily("Tester").addGiven("Raghad");
final IIdType patientId = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
Observation obs = new Observation();
obs.setStatus(ObservationStatus.FINAL);
obs.getSubject().setReferenceElement(patientId);
ourClient.create().resource(obs).execute();
// Allow any deletes, but don't allow cascade
ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().delete().allResources().withAnyId().andThen()
.build();
}
});
try {
ourClient
.delete()
.resourceById(patientId)
.withAdditionalHeader(Constants.HEADER_CASCADE_DELETE, "true")
.execute();
fail();
} catch (ForbiddenOperationException e) {
// good
}
} finally {
ourRestServer.getInterceptorService().unregisterInterceptor(cascadingDeleteInterceptor);
}
}
@Test
public void testDeleteCascadeAllowed() {
CascadingDeleteInterceptor cascadingDeleteInterceptor = new CascadingDeleteInterceptor(myDaoRegistry, myInterceptorRegistry);
ourRestServer.getInterceptorService().registerInterceptor(cascadingDeleteInterceptor);
try {
// Create Patient, and Observation that refers to it
Patient patient = new Patient();
patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100");
patient.addName().setFamily("Tester").addGiven("Raghad");
final IIdType patientId = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
Observation obs = new Observation();
obs.setStatus(ObservationStatus.FINAL);
obs.getSubject().setReferenceElement(patientId);
ourClient.create().resource(obs).execute();
// Allow any deletes, including cascading deletes
ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().delete().allResources().withAnyId().andThen()
.allow().delete().onCascade().allResources().withAnyId().andThen()
.build();
}
});
ourClient
.delete()
.resourceById(patientId)
.withAdditionalHeader(Constants.HEADER_CASCADE_DELETE, "true")
.execute();
} finally {
ourRestServer.getInterceptorService().unregisterInterceptor(cascadingDeleteInterceptor);
}
}
@Test
public void testDeleteCascadeAllowed_ButNotOnTargetType() {
CascadingDeleteInterceptor cascadingDeleteInterceptor = new CascadingDeleteInterceptor(myDaoRegistry, myInterceptorRegistry);
ourRestServer.getInterceptorService().registerInterceptor(cascadingDeleteInterceptor);
try {
// Create Patient, and Observation that refers to it
Patient patient = new Patient();
patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100");
patient.addName().setFamily("Tester").addGiven("Raghad");
final IIdType patientId = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
Observation obs = new Observation();
obs.setStatus(ObservationStatus.FINAL);
obs.getSubject().setReferenceElement(patientId);
ourClient.create().resource(obs).execute();
// Allow regular deletes on Patient and Observation, but cascading deletes on Patient only
ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().delete().resourcesOfType(Patient.class).withAnyId().andThen()
.allow().delete().resourcesOfType(Observation.class).withAnyId().andThen()
.allow().delete().onCascade().resourcesOfType(Patient.class).withAnyId().andThen()
.build();
}
});
try {
ourClient
.delete()
.resourceById(patientId)
.withAdditionalHeader(Constants.HEADER_CASCADE_DELETE, "true")
.execute();
fail();
} catch (ForbiddenOperationException e) {
// good
}
} finally {
ourRestServer.getInterceptorService().unregisterInterceptor(cascadingDeleteInterceptor);
}
}
@Test
public void testDeleteResourceConditional() throws IOException {
String methodName = "testDeleteResourceConditional";

View File

@ -1,8 +1,11 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.config.WebsocketDispatcherConfig;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryR4;
@ -75,6 +78,8 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
@Autowired
protected SubscriptionLoader mySubscriptionLoader;
@Autowired
protected DaoRegistry myDaoRegistry;
public BaseResourceProviderR4Test() {
super();
@ -101,6 +106,7 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
myTerminologyUploaderProvider = myAppCtx.getBean(TerminologyUploaderProviderR4.class);
ourGraphQLProvider = myAppCtx.getBean("myGraphQLProvider");
myDaoRegistry = myAppCtx.getBean(DaoRegistry.class);
ourRestServer.registerProviders(mySystemProvider, myTerminologyUploaderProvider, ourGraphQLProvider);

View File

@ -0,0 +1,133 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.DiagnosticReport;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.*;
/**
 * Integration tests for {@code CascadingDeleteInterceptor}: verifies that deleting a
 * resource which is the target of inbound references fails with a version conflict
 * unless the interceptor is registered AND the client explicitly requests a cascading
 * delete (here via the {@code _cascade=true} URL parameter).
 * <p>
 * Runs against the live REST server fixture started by the base class
 * (ourRestServer / ourClient / ourHttpClient).
 */
public class CascadingDeleteInterceptorR4Test extends BaseResourceProviderR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CascadingDeleteInterceptorR4Test.class);
// ID of the DiagnosticReport created by createResources() (it references the Observation)
private IIdType myDiagnosticReportId;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
// ID of the Patient at the root of the reference chain
private IIdType myPatientId;
// Interceptor under test; built fresh in before(), registered by individual tests as needed
private CascadingDeleteInterceptor myDeleteInterceptor;
// ID of the Observation that references the Patient
private IIdType myObservationId;
@Override
@Before
public void before() throws Exception {
super.before();
// Construct (but do not register) the interceptor — each test decides whether to register it
myDeleteInterceptor = new CascadingDeleteInterceptor(myDaoRegistry, myInterceptorBroadcaster);
}
@Override
@After
public void after() throws Exception {
super.after();
// NOTE(review): assumes unregistering an interceptor that was never registered is a no-op — confirm against InterceptorService
ourRestServer.getInterceptorService().unregisterInterceptor(myDeleteInterceptor);
}
/**
 * Creates a three-resource reference chain: DiagnosticReport -> Observation -> Patient.
 * Deleting the Patient therefore conflicts unless the delete cascades.
 * Not annotated @Before — tests call it explicitly.
 */
public void createResources() {
Patient p = new Patient();
p.setActive(true);
myPatientId = ourClient.create().resource(p).execute().getId().toUnqualifiedVersionless();
Observation o = new Observation();
o.setStatus(Observation.ObservationStatus.FINAL);
o.getSubject().setReference(myPatientId.getValue());
myObservationId = ourClient.create().resource(o).execute().getId().toUnqualifiedVersionless();
DiagnosticReport dr = new DiagnosticReport();
dr.setStatus(DiagnosticReport.DiagnosticReportStatus.FINAL);
dr.addResult().setReference(myObservationId.getValue());
myDiagnosticReportId = ourClient.create().resource(dr).execute().getId().toUnqualifiedVersionless();
}
/**
 * Without the interceptor registered, deleting the referenced Patient must fail
 * with a version-conflict error.
 */
@Test
public void testDeleteWithNoInterceptorAndConstraints() {
createResources();
try {
ourClient.delete().resourceById(myPatientId).execute();
fail();
} catch (ResourceVersionConflictException e) {
// good
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()));
}
}
/**
 * With the interceptor registered but no cascade requested, the delete still fails —
 * and the OperationOutcome now advertises how to enable cascading deletes.
 */
@Test
public void testDeleteWithInterceptorAndConstraints() {
createResources();
ourRestServer.getInterceptorService().registerInterceptor(myDeleteInterceptor);
try {
ourClient.delete().resourceById(myPatientId).execute();
fail();
} catch (ResourceVersionConflictException e) {
String output = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome());
ourLog.info(output);
assertThat(output, containsString("Note that cascading deletes are not active for this request. You can enable cascading deletes"));
}
}
/**
 * With the interceptor registered and {@code _cascade=true}, the delete succeeds,
 * the response message lists the two cascaded resources, and the Patient is
 * subsequently GONE.
 */
@Test
public void testDeleteCascading() throws IOException {
createResources();
ourRestServer.getInterceptorService().registerInterceptor(myDeleteInterceptor);
// Raw HTTP delete so the _cascade URL parameter can be supplied directly
HttpDelete delete = new HttpDelete(ourServerBase + "/" + myPatientId.getValue() + "?_cascade=true&_pretty=true");
delete.addHeader(Constants.HEADER_ACCEPT, Constants.CT_FHIR_JSON_NEW);
try (CloseableHttpResponse response = ourHttpClient.execute(delete)) {
assertEquals(200, response.getStatusLine().getStatusCode());
String deleteResponse = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
ourLog.info("Response: {}", deleteResponse);
assertThat(deleteResponse, containsString("Cascaded delete to 2 resources: [" + myDiagnosticReportId + "/_history/1, " + myObservationId + "/_history/1]"));
}
try {
ourLog.info("Reading {}", myPatientId);
ourClient.read().resource(Patient.class).withId(myPatientId).execute();
fail();
} catch (ResourceGoneException e) {
// good
}
}
@AfterClass
public static void afterClassClearContext() {
// Standard HAPI test teardown: clears static state so later test classes start clean
TestUtil.clearAllStaticFieldsForUnitTest();
}
}

View File

@ -4,7 +4,7 @@ import ca.uhn.fhir.jpa.config.TestR4Config;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.subscription.module.config.UnregisterScheduledProcessor;
import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.TokenOrListParam;

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.subscription.module.matcher.ISubscriptionMatcher;
import ca.uhn.fhir.jpa.subscription.module.matcher.InMemorySubscriptionMatcher;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
import org.mockito.Mockito;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

View File

@ -1,9 +1,12 @@
package ca.uhn.fhirtest;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.config.WebsocketDispatcherConfig;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.provider.JpaConformanceProviderDstu2;
import ca.uhn.fhir.jpa.provider.JpaSystemProviderDstu2;
import ca.uhn.fhir.jpa.provider.dstu3.JpaConformanceProviderDstu3;
@ -218,6 +221,15 @@ public class TestRestfulServer extends RestfulServer {
*/
SubscriptionInterceptorLoader subscriptionInterceptorLoader = myAppCtx.getBean(SubscriptionInterceptorLoader.class);
subscriptionInterceptorLoader.registerInterceptors();
/*
* Cascading deletes
*/
DaoRegistry daoRegistry = myAppCtx.getBean(DaoRegistry.class);
IInterceptorBroadcaster interceptorBroadcaster = myAppCtx.getBean(IInterceptorBroadcaster.class);
CascadingDeleteInterceptor cascadingDeleteInterceptor = new CascadingDeleteInterceptor(daoRegistry, interceptorBroadcaster);
registerInterceptor(cascadingDeleteInterceptor);
}
/**

View File

@ -34,6 +34,7 @@ import javax.servlet.http.HttpServletResponse;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
@ -93,7 +94,8 @@ public class ExceptionHandlingInterceptor extends InterceptorAdapter {
statusMessage = sm;
}
}
BaseResourceReturningMethodBinding.callOutgoingFailureOperationOutcomeHook(theRequestDetails, oo);
return response.streamResponseAsResource(oo, true, Collections.singleton(SummaryEnum.FALSE), statusCode, statusMessage, false, false);
}

View File

@ -16,15 +16,16 @@ import ca.uhn.fhir.rest.server.RestfulServerUtils.ResponseEncoding;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.text.StringEscapeUtils;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@ -235,8 +236,7 @@ public class ResponseHighlighterInterceptor {
}
@Hook(value = Pointcut.SERVER_HANDLE_EXCEPTION, order = InterceptorOrders.RESPONSE_HIGHLIGHTER_INTERCEPTOR)
public boolean handleException(RequestDetails theRequestDetails, BaseServerResponseException theException, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse)
throws ServletException, IOException {
public boolean handleException(RequestDetails theRequestDetails, BaseServerResponseException theException, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) {
/*
* It's not a browser...
*/
@ -260,11 +260,17 @@ public class ResponseHighlighterInterceptor {
return true;
}
if (theException.getOperationOutcome() == null) {
IBaseOperationOutcome oo = theException.getOperationOutcome();
if (oo == null) {
return true;
}
streamResponse(theRequestDetails, theServletResponse, theException.getOperationOutcome(), theServletRequest, theException.getStatusCode());
ResponseDetails responseDetails = new ResponseDetails();
responseDetails.setResponseResource(oo);
responseDetails.setResponseCode(theException.getStatusCode());
BaseResourceReturningMethodBinding.callOutgoingFailureOperationOutcomeHook(theRequestDetails, oo);
streamResponse(theRequestDetails, theServletResponse, responseDetails.getResponseResource(), theServletRequest, responseDetails.getResponseCode());
return false;
}

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -86,8 +86,8 @@ public class AuthorizationInterceptor implements IRuleApplier {
}
private void applyRulesAndFailIfDeny(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId,
IBaseResource theOutputResource) {
Verdict decision = applyRulesAndReturnDecision(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource);
IBaseResource theOutputResource, Pointcut thePointcut) {
Verdict decision = applyRulesAndReturnDecision(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, thePointcut);
if (decision.getDecision() == PolicyEnum.ALLOW) {
return;
@ -98,7 +98,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
@Override
public Verdict applyRulesAndReturnDecision(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId,
IBaseResource theOutputResource) {
IBaseResource theOutputResource, Pointcut thePointcut) {
@SuppressWarnings("unchecked")
List<IAuthRule> rules = (List<IAuthRule>) theRequestDetails.getUserData().get(myRequestRuleListKey);
if (rules == null) {
@ -110,7 +110,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
Verdict verdict = null;
for (IAuthRule nextRule : rules) {
verdict = nextRule.applyRule(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, this, flags);
verdict = nextRule.applyRule(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, this, flags, thePointcut);
if (verdict != null) {
ourLog.trace("Rule {} returned decision {}", nextRule, verdict.getDecision());
break;
@ -285,12 +285,12 @@ public class AuthorizationInterceptor implements IRuleApplier {
throw new ForbiddenOperationException("Access denied by default policy (no applicable rules)");
}
private void handleUserOperation(RequestDetails theRequest, IBaseResource theResource, RestOperationTypeEnum operation) {
applyRulesAndFailIfDeny(operation, theRequest, theResource, theResource.getIdElement(), null);
private void handleUserOperation(RequestDetails theRequest, IBaseResource theResource, RestOperationTypeEnum theOperation, Pointcut thePointcut) {
applyRulesAndFailIfDeny(theOperation, theRequest, theResource, theResource.getIdElement(), null, thePointcut);
}
@Hook(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED)
public void incomingRequestPreHandled(RequestDetails theRequest) {
public void incomingRequestPreHandled(RequestDetails theRequest, Pointcut thePointcut) {
IBaseResource inputResource = null;
IIdType inputResourceId = null;
@ -308,23 +308,33 @@ public class AuthorizationInterceptor implements IRuleApplier {
return;
}
applyRulesAndFailIfDeny(theRequest.getRestOperationType(), theRequest, inputResource, inputResourceId, null);
applyRulesAndFailIfDeny(theRequest.getRestOperationType(), theRequest, inputResource, inputResourceId, null, thePointcut);
}
@Hook(Pointcut.STORAGE_PRESHOW_RESOURCES)
public void hookPreShow(RequestDetails theRequestDetails, IPreResourceShowDetails theDetails) {
public void hookPreShow(RequestDetails theRequestDetails, IPreResourceShowDetails theDetails, Pointcut thePointcut) {
for (int i = 0; i < theDetails.size(); i++) {
IBaseResource next = theDetails.getResource(i);
checkOutgoingResourceAndFailIfDeny(theRequestDetails, next);
checkOutgoingResourceAndFailIfDeny(theRequestDetails, next, thePointcut);
}
}
@Hook(Pointcut.SERVER_OUTGOING_RESPONSE)
public void hookOutgoingResponse(RequestDetails theRequestDetails, IBaseResource theResponseObject) {
checkOutgoingResourceAndFailIfDeny(theRequestDetails, theResponseObject);
public void hookOutgoingResponse(RequestDetails theRequestDetails, IBaseResource theResponseObject, Pointcut thePointcut) {
checkOutgoingResourceAndFailIfDeny(theRequestDetails, theResponseObject, thePointcut);
}
private void checkOutgoingResourceAndFailIfDeny(RequestDetails theRequestDetails, IBaseResource theResponseObject) {
@Hook(Pointcut.STORAGE_CASCADE_DELETE)
public void hookCascadeDeleteForConflict(RequestDetails theRequestDetails, Pointcut thePointcut, IBaseResource theResourceToDelete) {
Validate.notNull(theResourceToDelete); // just in case
checkPointcutAndFailIfDeny(theRequestDetails, thePointcut, theResourceToDelete);
}
private void checkPointcutAndFailIfDeny(RequestDetails theRequestDetails, Pointcut thePointcut, IBaseResource theInputResource) {
applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, theInputResource, null, null, thePointcut);
}
private void checkOutgoingResourceAndFailIfDeny(RequestDetails theRequestDetails, IBaseResource theResponseObject, Pointcut thePointcut) {
switch (determineOperationDirection(theRequestDetails.getRestOperationType(), null)) {
case IN:
case NONE:
@ -343,6 +353,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
FhirContext fhirContext = theRequestDetails.getServer().getFhirContext();
List<IBaseResource> resources = Collections.emptyList();
//noinspection EnumSwitchStatementWhichMissesCases
switch (theRequestDetails.getRestOperationType()) {
case SEARCH_SYSTEM:
case SEARCH_TYPE:
@ -368,26 +379,26 @@ public class AuthorizationInterceptor implements IRuleApplier {
}
for (IBaseResource nextResponse : resources) {
applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, null, null, nextResponse);
applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, null, null, nextResponse, thePointcut);
}
}
@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED)
public void hookResourcePreCreate(RequestDetails theRequest, IBaseResource theResource) {
handleUserOperation(theRequest, theResource, RestOperationTypeEnum.CREATE);
public void hookResourcePreCreate(RequestDetails theRequest, IBaseResource theResource, Pointcut thePointcut) {
handleUserOperation(theRequest, theResource, RestOperationTypeEnum.CREATE, thePointcut);
}
@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_DELETED)
public void hookResourcePreDelete(RequestDetails theRequest, IBaseResource theResource) {
handleUserOperation(theRequest, theResource, RestOperationTypeEnum.DELETE);
public void hookResourcePreDelete(RequestDetails theRequest, IBaseResource theResource, Pointcut thePointcut) {
handleUserOperation(theRequest, theResource, RestOperationTypeEnum.DELETE, thePointcut);
}
@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED)
public void hookResourcePreUpdate(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) {
public void hookResourcePreUpdate(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource, Pointcut thePointcut) {
if (theOldResource != null) {
handleUserOperation(theRequest, theOldResource, RestOperationTypeEnum.UPDATE);
handleUserOperation(theRequest, theOldResource, RestOperationTypeEnum.UPDATE, thePointcut);
}
handleUserOperation(theRequest, theNewResource, RestOperationTypeEnum.UPDATE);
handleUserOperation(theRequest, theNewResource, RestOperationTypeEnum.UPDATE, thePointcut);
}
private enum OperationExamineDirection {
@ -409,7 +420,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
myDecidingRule = theDecidingRule;
}
public IAuthRule getDecidingRule() {
IAuthRule getDecidingRule() {
return myDecidingRule;
}
@ -433,10 +444,6 @@ public class AuthorizationInterceptor implements IRuleApplier {
}
private static UnsupportedOperationException failForDstu1() {
return new UnsupportedOperationException("Use of this interceptor on DSTU1 servers is not supportd");
}
static List<IBaseResource> toListOfResourcesAndExcludeContainer(IBaseResource theResponseObject, FhirContext fhirContext) {
if (theResponseObject == null) {
return Collections.emptyList();

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -20,12 +20,12 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
* #L%
*/
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import java.util.Set;
@ -40,23 +40,19 @@ public interface IAuthRule {
/**
* Applies the rule and returns a policy decision, or <code>null</code> if the rule does not apply
*
* @param theOperation
* The operation type
* @param theRequestDetails
* The request
* @param theInputResource
* The resource being input by the client, or <code>null</code>
*
* @param theOperation The operation type
* @param theRequestDetails The request
* @param theInputResource The resource being input by the client, or <code>null</code>
* @param theInputResourceId TODO
* @param theOutputResource
* The resource being returned by the server, or <code>null</code>
* @param theRuleApplier
* The rule applying module (this can be used by rules to apply the rule set to
* nested objects in the request, such as nested requests in a transaction)
* @param theFlags
* @param theOutputResource The resource being returned by the server, or <code>null</code>
* @param theRuleApplier The rule applying module (this can be used by rules to apply the rule set to
* nested objects in the request, such as nested requests in a transaction)
* @param theFlags The flags configured in the authorization interceptor
* @param thePointcut The pointcut hook that triggered this call
* @return Returns a policy decision, or <code>null</code> if the rule does not apply
*/
Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags);
Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut);
/**
* Returns a name for this rule, to be used in logs and error messages

View File

@ -39,7 +39,7 @@ public interface IAuthRuleBuilderRule {
/**
* This rule applies to the FHIR delete operation
*/
IAuthRuleBuilderRuleOp delete();
IAuthRuleBuilderRuleOpDelete delete();
/**
* This rule applies to <code>create</code> operations with a <code>conditional</code>

View File

@ -0,0 +1,33 @@
package ca.uhn.fhir.rest.server.interceptor.auth;
/*
 * #%L
 * HAPI FHIR - Server Framework
 * %%
 * Copyright (C) 2014 - 2019 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

/**
 * Rule-builder step for <code>delete</code> rules, allowing the rule to be
 * narrowed to apply specifically to cascading deletes. Obtained via
 * {@code IAuthRuleBuilderRule#delete()}.
 */
public interface IAuthRuleBuilderRuleOpDelete extends IAuthRuleBuilderRuleOp {

	/**
	 * Specifies that this rule applies to cascading deletes as opposed to regular
	 * deletes. Note that if you want to allow cascading deletes, you will typically
	 * require at least two separate rules: one for the original source resource, and
	 * one for the cascade.
	 *
	 * @return Returns the next step of the rule-builder chain, for specifying
	 *         which resources the cascade rule applies to
	 */
	IAuthRuleBuilderRuleOp onCascade();

}

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
* #L%
*/
import ca.uhn.fhir.interceptor.api.Pointcut;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
@ -29,6 +30,6 @@ import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict
public interface IRuleApplier {
Verdict applyRulesAndReturnDecision(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource);
Verdict applyRulesAndReturnDecision(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, Pointcut thePointcut);
}

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict;
@ -80,7 +81,7 @@ class OperationRule extends BaseRule implements IAuthRule {
}
@Override
public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags) {
public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut) {
FhirContext ctx = theRequestDetails.getServer().getFhirContext();
if (isOtherTenant(theRequestDetails)) {

View File

@ -195,7 +195,7 @@ public class RuleBuilder implements IAuthRuleBuilder {
}
@Override
public IAuthRuleBuilderRuleOp delete() {
public IAuthRuleBuilderRuleOpDelete delete() {
return new RuleBuilderRuleOp(RuleOpEnum.DELETE);
}
@ -300,10 +300,11 @@ public class RuleBuilder implements IAuthRuleBuilder {
}
private class RuleBuilderRuleOp implements IAuthRuleBuilderRuleOp {
private class RuleBuilderRuleOp implements IAuthRuleBuilderRuleOp, IAuthRuleBuilderRuleOpDelete {
private final RuleOpEnum myRuleOp;
private RuleBuilderRuleOpClassifier myInstancesBuilder;
private boolean myOnCascade;
public RuleBuilderRuleOp(RuleOpEnum theRuleOp) {
myRuleOp = theRuleOp;
@ -350,6 +351,12 @@ public class RuleBuilder implements IAuthRuleBuilder {
return new RuleBuilderRuleOpClassifier(AppliesTypeEnum.TYPES, Collections.singleton(theType));
}
@Override
public IAuthRuleBuilderRuleOp onCascade() {
myOnCascade = true;
return this;
}
private class RuleBuilderRuleOpClassifier implements IAuthRuleBuilderRuleOpClassifier {
private final AppliesTypeEnum myAppliesTo;
@ -389,6 +396,7 @@ public class RuleBuilder implements IAuthRuleBuilder {
myRule.setClassifierType(myClassifierType);
myRule.setClassifierCompartmentName(myInCompartmentName);
myRule.setClassifierCompartmentOwners(myInCompartmentOwners);
myRule.setAppliesToDeleteCascade(myOnCascade);
myRules.add(myRule);
return new RuleBuilderFinished(myRule);

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
* #L%
*/
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict;
@ -40,7 +41,7 @@ public class RuleImplConditional extends BaseRule implements IAuthRule {
@Override
public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource,
IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags) {
IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut) {
if (isOtherTenant(theRequestDetails)) {
return null;

View File

@ -3,11 +3,11 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict;
@ -59,11 +59,12 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
private RuleOpEnum myOp;
private TransactionAppliesToEnum myTransactionAppliesToOp;
private Collection<IIdType> myAppliesToInstances;
private boolean myAppliesToDeleteCascade;
/**
* Constructor
*/
public RuleImplOp(String theRuleName) {
RuleImplOp(String theRuleName) {
super(theRuleName);
}
@ -72,13 +73,13 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
return myAppliesToInstances;
}
public void setAppliesToInstances(Collection<IIdType> theAppliesToInstances) {
void setAppliesToInstances(Collection<IIdType> theAppliesToInstances) {
myAppliesToInstances = theAppliesToInstances;
}
@Override
public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource,
IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags) {
IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut) {
if (isOtherTenant(theRequestDetails)) {
return null;
@ -207,6 +208,9 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
break;
case DELETE:
if (theOperation == RestOperationTypeEnum.DELETE) {
if (myAppliesToDeleteCascade != (thePointcut == Pointcut.STORAGE_CASCADE_DELETE)) {
return null;
}
if (theInputResource == null) {
return newVerdict();
}
@ -264,7 +268,7 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
}
}
Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision(operation, theRequestDetails, inputResource, null, null);
Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision(operation, theRequestDetails, inputResource, null, null, thePointcut);
if (newVerdict == null) {
continue;
} else if (verdict == null) {
@ -292,7 +296,7 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
if (nextResource == null) {
continue;
}
Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision(RestOperationTypeEnum.READ, theRequestDetails, null, null, nextResource);
Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision(RestOperationTypeEnum.READ, theRequestDetails, null, null, nextResource, thePointcut);
if (newVerdict == null) {
continue;
} else if (verdict == null) {
@ -584,4 +588,8 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
return builder.toString();
}
public void setAppliesToDeleteCascade(boolean theAppliesToDeleteCascade) {
myAppliesToDeleteCascade = theAppliesToDeleteCascade;
}
}

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.rest.server.interceptor.auth;
* #L%
*/
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -35,7 +36,7 @@ class RuleImplPatch extends BaseRule {
}
@Override
public AuthorizationInterceptor.Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags) {
public AuthorizationInterceptor.Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut) {
if (isOtherTenant(theRequestDetails)) {
return null;
}

View File

@ -415,7 +415,7 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi
RESOURCE
}
static boolean callOutgoingResponseHook(RequestDetails theRequest, ResponseDetails theResponseDetails) {
public static boolean callOutgoingResponseHook(RequestDetails theRequest, ResponseDetails theResponseDetails) {
HttpServletRequest servletRequest = null;
HttpServletResponse servletResponse = null;
if (theRequest instanceof ServletRequestDetails) {
@ -438,4 +438,14 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi
return true;
}
public static void callOutgoingFailureOperationOutcomeHook(RequestDetails theRequestDetails, IBaseOperationOutcome theOperationOutcome) {
HookParams responseParams = new HookParams();
responseParams.add(RequestDetails.class, theRequestDetails);
responseParams.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
responseParams.add(IBaseOperationOutcome.class, theOperationOutcome);
if (theRequestDetails.getInterceptorBroadcaster() != null) {
theRequestDetails.getInterceptorBroadcaster().callHooks(Pointcut.SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME, responseParams);
}
}
}

View File

@ -285,8 +285,6 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding {
theMethodParams[myIdParamIndex] = theRequest.getId();
}
// populateActionRequestDetailsForInterceptor(theRequest); AAAAAA
Object response = invokeServerMethod(theServer, theRequest, theMethodParams);
IBundleProvider retVal = toResourceList(response);
return retVal;

View File

@ -88,12 +88,9 @@ public class InterceptorDstu3Test {
public void testServerPreHandledOnOperationCapturesResource() throws IOException {
AtomicReference<IBaseResource> resource = new AtomicReference<>();
IAnonymousInterceptor interceptor = new IAnonymousInterceptor() {
@Override
public void invoke(Pointcut thePointcut, HookParams theArgs) {
RequestDetails requestDetails = theArgs.get(RequestDetails.class);
resource.set(requestDetails.getResource());
}
IAnonymousInterceptor interceptor = (thePointcut, theArgs) -> {
RequestDetails requestDetails = theArgs.get(RequestDetails.class);
resource.set(requestDetails.getResource());
};
ourServlet.getInterceptorService().registerAnonymousInterceptor(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED, interceptor);

View File

@ -2,7 +2,9 @@ package ca.uhn.fhir.rest.server.interceptor;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.api.BundleInclusionRule;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
@ -12,6 +14,7 @@ import ca.uhn.fhir.rest.server.IResourceProvider;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.test.utilities.JettyUtil;
import ca.uhn.fhir.util.TestUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.base.Charsets;
@ -25,6 +28,7 @@ import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.*;
import org.junit.AfterClass;
@ -46,8 +50,6 @@ import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import ca.uhn.fhir.test.utilities.JettyUtil;
public class ResponseHighlightingInterceptorTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseHighlightingInterceptorTest.class);
@ -404,6 +406,34 @@ public class ResponseHighlightingInterceptorTest {
assertThat(output, containsString("<span class='hlTagName'>OperationOutcome</span>"));
}
@Test
public void testHighlightExceptionInvokesOutgoingFailureOperationOutcome() throws Exception {
IAnonymousInterceptor outgoingResponseInterceptor = (thePointcut, theArgs) -> {
OperationOutcome oo = (OperationOutcome) theArgs.get(IBaseOperationOutcome.class);
oo.addIssue().setDiagnostics("HELP IM A BUG");
};
ourServlet.getInterceptorService().registerAnonymousInterceptor(Pointcut.SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME, outgoingResponseInterceptor);
try {
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Foobar/123");
httpGet.addHeader("Accept", "text/html");
CloseableHttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
status.close();
ourLog.info("Resp: {}", responseContent);
assertEquals(404, status.getStatusLine().getStatusCode());
assertThat(responseContent, stringContainsInOrder("HELP IM A BUG"));
} finally {
ourServlet.getInterceptorService().unregisterInterceptor(outgoingResponseInterceptor);
}
}
/**
* See #346
*/

View File

@ -202,6 +202,14 @@
the HAPI FHIR library with all of its submodules automatically sharing the same
version. Thanks to Stig Døssing for the pull request!
</action>
<action type="add">
A new interceptor called CascadingDeleteInterceptor has been added to the
JPA project. This interceptor allows deletes to cascade when a specific
URL parameter or header is added to the request. Cascading deletes
can also be controlled by a new flag in the AuthorizationInterceptor
RuleBuilder, in order to ensure that cascading deletes are only available
to users with sufficient permission.
</action>
</release>
<release version="3.8.0" date="2019-05-30" description="Hippo">
<action type="fix">

View File

@ -1,5 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<document xmlns="http://maven.apache.org/XDOC/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 http://maven.apache.org/xsd/xdoc-2.0.xsd">
<document xmlns="http://maven.apache.org/XDOC/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 http://maven.apache.org/xsd/xdoc-2.0.xsd">
<properties>
<title>JPA Server</title>
@ -11,7 +12,7 @@
<section name="JPA Server">
<p>
The HAPI FHIR
The HAPI FHIR
<a href="./doc_rest_server.html">RestfulServer</a>
module can be used to create a FHIR server endpoint against an arbitrary
data source, which could be a database of your own design, an existing
@ -20,83 +21,99 @@
<p>
HAPI also provides a persistence module which can be used to
provide a complete RESTful server implementation, backed by a database of
your choosing. This module uses the <a href="http://en.wikipedia.org/wiki/Java_Persistence_API">JPA 2.0</a>
API to store data in a database without depending on any specific database technology.
your choosing. This module uses the
<a href="http://en.wikipedia.org/wiki/Java_Persistence_API">JPA 2.0</a>
API to store data in a database without depending on any specific database technology.
</p>
<p>
<b>Important Note: </b>
This implementation uses a fairly simple table design, with a
single table being used to hold resource bodies (which are stored as
CLOBs, optionally GZipped to save space) and a set of tables to hold search indexes, tags,
<b>Important Note:</b>
This implementation uses a fairly simple table design, with a
single table being used to hold resource bodies (which are stored as
CLOBs, optionally GZipped to save space) and a set of tables to hold search indexes, tags,
history details, etc. This design is only one of many possible ways
of designing a FHIR server so it is worth considering whether it
is appropriate for the problem you are trying to solve.
</p>
<subsection name="Getting Started">
<p>
The easiest way to get started with HAPI's JPA server module is
to begin with the example project. There is a complete sample project
found in our GitHub repo here: <a href="https://github.com/jamesagnew/hapi-fhir/tree/master/hapi-fhir-jpaserver-example">hapi-fhir-jpaserver-example</a>
to begin with the example project. There is a complete sample project
found in our GitHub repo here:
<a href="https://github.com/jamesagnew/hapi-fhir/tree/master/hapi-fhir-jpaserver-example">
hapi-fhir-jpaserver-example
</a>
</p>
<p>
This example is a fully contained FHIR server, supporting all standard operations (read/create/delete/etc).
It bundles an embedded instance of the <a href="http://db.apache.org/derby/">Apache Derby</a> Java database
This example is a fully contained FHIR server, supporting all standard operations
(read/create/delete/etc).
It bundles an embedded instance of the <a href="http://db.apache.org/derby/">Apache Derby</a> Java
database
so that the server can run without depending on any external database, but it can also be
configured to use an installation of Oracle, Postgres, etc.
configured to use an installation of Oracle, Postgres, etc.
</p>
<p>
To take this project for a spin, check out the sources from GitHub (or download a snapshot),
and then build the project:
</p>
<source><![CDATA[$ cd hapi-fhir-jpaserver-example
$ mvn install]]></source>
<p>
You now have two options for starting the server:
</p>
<ul>
<li>
<b>Deploy to Tomcat/JBoss/Websphere/etc: </b> You will now have a file
<b>Deploy to Tomcat/JBoss/Websphere/etc:</b>
You will now have a file
in your <code>target</code> directory called <code>hapi-fhir-jpaserver-example.war</code>.
This WAR file can be deployed to any Servlet container, at which point you could
access the server by pointing your browser at a URL similar to the following
(you may need to adjust the
access the server by pointing your browser at a URL similar to the following
(you may need to adjust the
port depending on which port your container is configured to listen on):
<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">http://localhost:8080/hapi-fhir-jpaserver-example/</a>
<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">
http://localhost:8080/hapi-fhir-jpaserver-example/
</a>
</li>
<li>
<b>Run with Maven and Embedded Jetty: </b> To start the server
directly within Maven, you can execute the following command:<br/>
<b>Run with Maven and Embedded Jetty:</b>
To start the server
directly within Maven, you can execute the following command:
<br/>
<source>$ mvn jetty:run</source>
You can then access the server by pointing your browser at the following URL:
<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">http://localhost:8080/hapi-fhir-jpaserver-example/</a>
You can then access the server by pointing your browser at the following URL:
<a href="http://localhost:8080/hapi-fhir-jpaserver-example/">
http://localhost:8080/hapi-fhir-jpaserver-example/
</a>
</li>
</ul>
</subsection>
</section>
<section name="Configuring The JPA Server">
<p>
The JPA server is configured through a series of configuration files, most
of which are documented inline.
of which are documented inline.
</p>
<ul>
<li>
<a href="https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-jpaserver-example/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java"><b>FhirServerConfig.java</b></a>:
Configures the database connection settings
<a href="https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-jpaserver-example/src/main/java/ca/uhn/fhir/jpa/demo/FhirServerConfig.java">
<b>FhirServerConfig.java</b>
</a>
:
Configures the database connection settings
</li>
</ul>
</section>
<section name="DaoConfig">
<p>
The Spring configuration contains a definition for a bean called <code>daoConfig</code>,
which will look something like the following:
@ -108,16 +125,17 @@ public DaoConfig daoConfig() {
retVal.setAllowInlineMatchUrlReferences(true);
return retVal;
}]]></source>
<p>
You can use this method to change various configuration settings on the DaoConfig bean
which define the way that the JPA server will behave.
See the <a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html">DaoConfig JavaDoc</a>
See the
<a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html">DaoConfig JavaDoc</a>
for information about the available settings.
</p>
<subsection name="External/Absolute Resource References">
<p>
Clients may sometimes post resources to your server that contain
absolute resource references. For example, consider the following resource:
@ -133,7 +151,7 @@ public DaoConfig daoConfig() {
<reference value="http://example.com/fhir/Organization/123"/>
</managingOrganization>
</Patient>]]></source>
<p>
By default, the server will reject this reference, as only
local references are permitted by the server. This can be changed
@ -141,7 +159,7 @@ public DaoConfig daoConfig() {
</p>
<p>
If you want the server to recognize that this URL is actually a local
reference (i.e. because the server will be deployed to the base URL
reference (i.e. because the server will be deployed to the base URL
<code>http://example.com/fhir/</code>) you can
configure the server to recognize this URL via the following DaoConfig
setting:
@ -161,7 +179,7 @@ public DaoConfig daoConfig() {
it will be possible to search for references that refer to these
external references.
</p>
<source><![CDATA[@Bean
public DaoConfig daoConfig() {
DaoConfig retVal = new DaoConfig();
@ -176,7 +194,7 @@ public DaoConfig daoConfig() {
</subsection>
<subsection name="Logical References">
<p>
In some cases, you may have references which are <i>Logical References</i>,
which means that they act as an identifier and not necessarily as a literal
@ -185,27 +203,30 @@ public DaoConfig daoConfig() {
<p>
A common use for logical references is in references to conformance
resources, such as ValueSets, StructureDefinitions, etc. For example,
you might refer to the ValueSet
you might refer to the ValueSet
<code>http://hl7.org/fhir/ValueSet/quantity-comparator</code>
from your own resources. In this case, you are not necessarily telling
the server that this is a real address that it should resolve, but
rather that this is an identifier for a ValueSet where
<code>ValueSet.url</code> has the given URI/URL.
<code>ValueSet.url</code>
has the given URI/URL.
</p>
<p>
HAPI can be configured to treat certain URI/URL patterns as
HAPI can be configured to treat certain URI/URL patterns as
logical by using the DaoConfig#setTreatReferencesAsLogical property
(see <a href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html#setTreatReferencesAsLogical-java.util.Set-">JavaDoc</a>).
(see <a
href="./apidocs-jpaserver/ca/uhn/fhir/jpa/dao/DaoConfig.html#setTreatReferencesAsLogical-java.util.Set-">
JavaDoc</a>).
For example:
</p>
<div class="source">
<pre>
// Treat specific URL as logical
myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");
// Treat all references with given prefix as logical
myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
</pre>
<pre>
// Treat specific URL as logical
myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");
// Treat all references with given prefix as logical
myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
</pre>
</div>
<a name="search_result caching"/>
</subsection>
@ -225,26 +246,32 @@ public DaoConfig daoConfig() {
but in some cases it is not. If you want to disable caching, you have two
options:
</p>
<p><b>Globally Disable / Change Caching Timeout</b></p>
<p>
<b>Globally Disable / Change Caching Timeout</b>
</p>
<p>
You can change the global cache using the following setting:
</p>
<div class="source">
<pre>
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
</pre>
<pre>
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
</pre>
</div>
<p><b>Disable Cache at the Request Level</b></p>
<p>
<b>Disable Cache at the Request Level</b>
</p>
<p>
Clients can selectively disable caching for an individual request
using the Cache-Control header:
</p>
<div class="source">
<pre>
Cache-Control: no-cache
</pre>
<pre>
Cache-Control: no-cache
</pre>
</div>
<p><b>Disable Paging at the Request Level</b></p>
<p>
<b>Disable Paging at the Request Level</b>
</p>
<p>
If the client knows that they will only want a small number of results
(for example, a UI containing 20 results is being shown and the client
@ -265,64 +292,74 @@ public DaoConfig daoConfig() {
</subsection>
</section>
<section name="Architecture">
<img src="images/jpa_architecture.png" alt="Architecture" align="right"/>
<p>
The HAPI JPA Server has the following components:
</p>
<ul>
<li>
<b>Resource Providers: </b>
<b>Resource Providers:</b>
A RESTful server <a href="./doc_rest_server.html#resource_providers">Resource Provider</a> is
provided for each resource type in a given release of FHIR. Each resource provider implements
a
<a href="./apidocs/ca/uhn/fhir/rest/annotation/Search.html">@Search</a>
a
<a href="./apidocs/ca/uhn/fhir/rest/annotation/Search.html">@Search</a>
method implementing the complete set of search parameters defined in the FHIR
specification for the given resource type.<br/><br/>
specification for the given resource type.
<br/>
<br/>
The resource providers also extend a superclass which implements all of the
other FHIR methods, such as Read, Create, Delete, etc.<br/><br/>
Note that these resource providers are generated as a part of the HAPI build process,
so they are not checked into Git. You can see their source
other FHIR methods, such as Read, Create, Delete, etc.
<br/>
<br/>
Note that these resource providers are generated as a part of the HAPI build process,
so they are not checked into Git. You can see their source
in the <a href="./xref-jpaserver/">JXR Report</a>,
for example the
<a href="./xref-jpaserver/ca/uhn/fhir/jpa/rp/dstu2/PatientResourceProvider.html">PatientResourceProvider</a>.
<br/><br/>
<a href="./xref-jpaserver/ca/uhn/fhir/jpa/rp/dstu2/PatientResourceProvider.html">
PatientResourceProvider</a>.
<br/>
<br/>
The resource providers do not actually implement any of the logic
in searching, updating, etc. They simply receive the incoming HTTP calls (via the RestfulServer)
and pass along the incoming requests to the DAOs.
<br/><br/>
<br/>
<br/>
</li>
<li>
<b>HAPI DAOs: </b>
The DAOs actually implement all of the database business logic relating to
<b>HAPI DAOs:</b>
The DAOs actually implement all of the database business logic relating to
the storage, indexing, and retrieval of FHIR resources, using the underlying JPA
API.
<br/><br/>
</li>
<br/>
<br/>
</li>
<li>
<b>Hibernate: </b>
<b>Hibernate:</b>
The HAPI JPA Server uses the JPA library, implemented by Hibernate. No Hibernate
specific features are used, so the library should also work with other
providers (e.g. Eclipselink) but it is not tested regularly with them.
<br/><br/>
</li>
<br/>
<br/>
</li>
<li>
<b>Database: </b>
<b>Database:</b>
The RESTful server uses an embedded Derby database, but can be configured to
talk to
<a href="https://developer.jboss.org/wiki/SupportedDatabases2?_sscc=t">any database supported by Hibernate</a>.
</li>
talk to
<a href="https://developer.jboss.org/wiki/SupportedDatabases2?_sscc=t">any database supported by
Hibernate</a>.
</li>
</ul>
</section>
<section name="Additional Information">
<ul>
<li>
<a href="https://www.openhealthhub.org/t/hapi-terminology-server-uk-snomed-ct-import/592">This page</a>
@ -330,9 +367,9 @@ public DaoConfig daoConfig() {
the database.
</li>
</ul>
</section>
<!--
alter table hfj_res_link ALTER COLUMN "TARGET_RESOURCE_ID" NULL;
@ -378,19 +415,24 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
<p>
When upgrading the JPA server from one version of HAPI FHIR to a newer version,
often there will be changes to the database schema. The <b>Migrate Database</b>
often there will be changes to the database schema. The
<b>Migrate Database</b>
command can be used to perform a migration from one version to the next.
</p>
<p>
Note that this feature was added in HAPI FHIR 3.5.0. It is not able to migrate
from versions prior to HAPI FHIR 3.4.0. <b>Please make a backup of your
database before running this command!</b>
from versions prior to HAPI FHIR 3.4.0.
<b>Please make a backup of your
database before running this command!
</b>
</p>
<p>
The following example shows how to use the migrator utility to migrate between two versions.
</p>
<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_5_0</pre>
<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u
"jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_5_0
</pre>
<p>
You may use the following command to get detailed help on the options:
@ -400,9 +442,18 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
<p>
Note the arguments:
<ul>
<li><code>-d [dialect]</code> - This indicates the database dialect to use. See the detailed help for a list of options</li>
<li><code>-f [version]</code> - The version to migrate from</li>
<li><code>-t [version]</code> - The version to migrate to</li>
<li>
<code>-d [dialect]</code>
- This indicates the database dialect to use. See the detailed help for a list of options
</li>
<li>
<code>-f [version]</code>
- The version to migrate from
</li>
<li>
<code>-t [version]</code>
- The version to migrate to
</li>
</ul>
</p>
@ -412,7 +463,9 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
so they are not included in HAPI FHIR. In order to use this command with an Oracle database,
you will need to invoke the CLI as follows:
</p>
<pre>java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n "[username]" -p "[password]" -f V3_4_0 -t V3_5_0</pre>
<pre>java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n
"[username]" -p "[password]" -f V3_4_0 -t V3_5_0
</pre>
</subsection>
<subsection name="Migrating 3.4.0 to 3.5.0+">
</li>
<li>
Modify your <code>DaoConfig</code> to specify that hash-based searches should not be used, using
the following setting:
<br/>
<pre>myDaoConfig.setDisableHashBasedSearches(true);</pre>
</li>
<li>
Make sure that you have your JPA settings configured to not automatically
create database indexes and columns using the following setting
in your JPA Properties:
<br/>
<pre>extraProperties.put("hibernate.hbm2ddl.auto", "none");</pre>
</li>
<li>
Run the database migrator command, including the entry <code>-x no-migrate-350-hashes</code>
Run the database migrator command, including the entry
<code>-x no-migrate-350-hashes</code>
on the command line. For example:
<br/>
<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u
"jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0 -x
no-migrate-350-hashes
</pre>
</li>
<li>
Rebuild and start your HAPI FHIR JPA server. At this point you should have a working
</li>
<li>
With the system running, request a complete reindex of the data in the database using
an HTTP request such as the following:
<br/>
<pre>GET /$mark-all-resources-for-reindexing</pre>
Note that this is a custom operation built into the HAPI FHIR JPA server. It should
be secured in a real deployment, so Authentication is likely required for this
<li>
Modify your <code>DaoConfig</code> to specify that hash-based searches are used, using
the following setting (this is the default setting, so it could also simply
be omitted):
<br/>
<pre>myDaoConfig.setDisableHashBasedSearches(false);</pre>
</li>
<li>
Execute the migrator tool again, this time omitting the flag option, e.g.
<br/>
<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u
"jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0
</pre>
</li>
<li>
Rebuild, and start HAPI FHIR JPA again.
@ -503,6 +568,23 @@ delete from hfj_res_ver where res_id in (select res_id from hfj_resource where s
</section>
<section name="Cascading Deletes">
<p>
An interceptor called
<code>CascadingDeleteInterceptor</code>
may be registered against the Server. When this interceptor is enabled,
cascading deletes may be performed using either of the following:
</p>
<ul>
<li>The request may include the following parameter:
<code>_cascade=true</code>
</li>
<li>The request may include the following header:
<code>X-Cascade-Delete: true</code>
</li>
</ul>
</section>
</body>
</document>