delete expunge (#2131)

Added delete _expunge=true
This commit is contained in:
Ken Stevens 2020-10-20 17:21:50 -04:00 committed by GitHub
parent 3738297354
commit 3b91873b7c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
49 changed files with 939 additions and 192 deletions

View File

@ -916,6 +916,95 @@ public enum Pointcut {
"org.hl7.fhir.instance.model.api.IBaseResource"
),
/**
 * <b>Storage Hook:</b>
 * Invoked when a set of resources are about to be deleted and expunged via a URL such as
 * <code>http://localhost/Patient?active=false&amp;_expunge=true</code>
 * <p>
 * Hooks may accept the following parameters:
 * </p>
 * <ul>
 * <li>
 * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
 * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
 * pulled out of the servlet request. Note that the bean
 * properties are not all guaranteed to be populated, depending on how early during processing the
 * exception occurred. <b>Note that this parameter may be null in contexts where the request is not
 * known, such as while processing searches</b>
 * </li>
 * <li>
 * ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
 * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
 * pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
 * only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
 * </li>
 * <li>
 * java.lang.String - Contains the url used to delete and expunge the resources
 * </li>
 * </ul>
 * <p>
 * Hooks should return <code>void</code>. They may choose to throw an exception however, in
 * which case the delete expunge will not occur.
 * </p>
 */
STORAGE_PRE_DELETE_EXPUNGE(
void.class,
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
"java.lang.String"
),
/**
 * <b>Storage Hook:</b>
 * Invoked when a batch of resource pids are about to be deleted and expunged via a URL such as
 * <code>http://localhost/Patient?active=false&amp;_expunge=true</code>
 * <p>
 * Hooks may accept the following parameters (in registration order):
 * </p>
 * <ul>
 * <li>
 * java.lang.String - the name of the resource type being deleted
 * </li>
 * <li>
 * java.util.List - the list of Long pids of the resources about to be deleted
 * </li>
 * <li>
 * java.util.concurrent.atomic.AtomicLong - holds a running tally of all entities deleted so far.
 * If the pointcut callback deletes any entities, then this parameter should be incremented by the total number
 * of additional entities deleted.
 * </li>
 * <li>
 * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
 * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
 * pulled out of the servlet request. Note that the bean
 * properties are not all guaranteed to be populated, depending on how early during processing the
 * exception occurred. <b>Note that this parameter may be null in contexts where the request is not
 * known, such as while processing searches</b>
 * </li>
 * <li>
 * ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
 * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
 * pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
 * only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
 * </li>
 * </ul>
 * <p>
 * Hooks should return <code>void</code>. They may choose to throw an exception however, in
 * which case the delete expunge will not occur.
 * </p>
 */
STORAGE_PRE_DELETE_EXPUNGE_PID_LIST(
void.class,
"java.lang.String",
"java.util.List",
"java.util.concurrent.atomic.AtomicLong",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
/**
* <b>Storage Hook:</b>
* Invoked when one or more resources may be returned to the user, whether as a part of a READ,

View File

@ -0,0 +1,10 @@
---
type: add
issue: 2131
title: "A new _expunge parameter has been added to the DELETE operation when deleting multiple resources via a URL. For
example DELETE http://www.example.com:8000/Observation?_expunge=true or
DELETE http://www.example.com:8000/Observation?status=cancelled&_expunge=true. When the _expunge parameter is provided to DELETE
then the matched resources and all of their history will be both deleted and expunged from the database. This will
perform considerably faster than doing the delete and expunge separately. Note that Expunge must be enabled on the
server for this to work."

View File

@ -144,6 +144,7 @@ public class DaoConfig {
private IdStrategyEnum myResourceServerIdStrategy = IdStrategyEnum.SEQUENTIAL_NUMERIC;
private boolean myMarkResourcesForReindexingUponSearchParameterChange;
private boolean myExpungeEnabled;
private boolean myDeleteExpungeEnabled;
private int myExpungeBatchSize = DEFAULT_EXPUNGE_BATCH_SIZE;
private int myReindexThreadCount;
private int myExpungeThreadCount;
@ -1308,6 +1309,42 @@ public class DaoConfig {
return myExpungeEnabled;
}
/**
 * If set to <code>true</code> (default is <code>false</code>), the _expunge parameter on the DELETE
 * operation will be enabled on this server. DELETE _expunge removes all data associated with a resource in a highly performant
 * way, skipping most of the checks that are enforced with usual DELETE operations. The only check
 * that is performed before deleting the resources and their indexes is that no other resources reference the resources about to
 * be deleted. This operation is potentially dangerous since it allows
 * a client to physically delete data in a way that can not be recovered (without resorting
 * to backups).
 * <p>
 * It is recommended to not enable this setting without appropriate security
 * in place on your server to prevent non-administrators from using this
 * operation.
 * </p>
 *
 * @param theDeleteExpungeEnabled whether to permit the _expunge parameter on DELETE
 */
public void setDeleteExpungeEnabled(boolean theDeleteExpungeEnabled) {
myDeleteExpungeEnabled = theDeleteExpungeEnabled;
}
/**
 * If set to <code>true</code> (default is <code>false</code>), the _expunge parameter on the DELETE
 * operation will be enabled on this server. DELETE _expunge removes all data associated with a resource in a highly performant
 * way, skipping most of the checks that are enforced with usual DELETE operations. The only check
 * that is performed before deleting the data is that no other resources reference the resources about to
 * be deleted. This operation is potentially dangerous since it allows
 * a client to physically delete data in a way that can not be recovered (without resorting
 * to backups).
 * <p>
 * It is recommended to not enable this setting without appropriate security
 * in place on your server to prevent non-administrators from using this
 * operation.
 * </p>
 *
 * @return whether the _expunge parameter on DELETE is enabled
 */
public boolean isDeleteExpungeEnabled() {
return myDeleteExpungeEnabled;
}
/**
* If set to <code>true</code> (default is <code>false</code>), the $expunge operation
* will be enabled on this server. This operation is potentially dangerous since it allows

View File

@ -32,6 +32,8 @@ import java.util.List;
public class DeleteMethodOutcome extends MethodOutcome {
private List<ResourceTable> myDeletedEntities;
private long myExpungedResourcesCount;
private long myExpungedEntitiesCount;
public List<ResourceTable> getDeletedEntities() {
return myDeletedEntities;
@ -42,4 +44,21 @@ public class DeleteMethodOutcome extends MethodOutcome {
return this;
}
/**
 * @return the number of resources that were deleted and expunged (rows removed from the resource table)
 */
public long getExpungedResourcesCount() {
return myExpungedResourcesCount;
}

/**
 * Records the number of resources deleted and expunged.
 *
 * @return this outcome, for fluent chaining
 */
public DeleteMethodOutcome setExpungedResourcesCount(long theExpungedResourcesCount) {
myExpungedResourcesCount = theExpungedResourcesCount;
return this;
}

/**
 * @return the total number of database entities (rows across all related tables) that were expunged
 */
public long getExpungedEntitiesCount() {
return myExpungedEntitiesCount;
}

/**
 * Records the total number of database entities expunged.
 *
 * @return this outcome, for fluent chaining
 */
public DeleteMethodOutcome setExpungedEntitiesCount(long theExpungedEntitiesCount) {
myExpungedEntitiesCount = theExpungedEntitiesCount;
return this;
}
}

View File

@ -33,6 +33,7 @@ import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
@ -51,6 +52,7 @@ import ca.uhn.fhir.jpa.patch.XmlPatchUtils;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.model.api.IQueryParameterType;
@ -102,6 +104,7 @@ import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.data.domain.SliceImpl;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.annotation.Propagation;
@ -151,6 +154,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
private HapiTransactionService myTransactionService;
@Autowired(required = false)
protected IFulltextSearchSvc mySearchDao;
@Autowired
private MatchUrlService myMatchUrlService;
@Autowired
private DeleteExpungeService myDeleteExpungeService;
private IInstanceValidatorModule myInstanceValidator;
private String myResourceName;
@ -456,6 +463,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(TransactionDetails.class, theTransactionDetails);
if (theTransactionDetails.isAcceptingDeferredInterceptorBroadcasts()) {
theTransactionDetails.addDeferredInterceptorBroadcast(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
} else {
@ -499,14 +507,31 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Nonnull
private DeleteMethodOutcome doDeleteByUrl(String theUrl, DeleteConflictList deleteConflicts, RequestDetails theRequest) {
	// Translate the conditional URL into a search, then resolve the matching resource pids.
	// (The rendered diff contained both the old processMatchUrl() call and this new form,
	// declaring resourceIds twice; only the post-commit form is kept here.)
	RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(myResourceType);
	SearchParameterMap paramMap = myMatchUrlService.translateMatchUrl(theUrl, resourceDef);
	paramMap.setLoadSynchronous(true);
	Set<ResourcePersistentId> resourceIds = myMatchResourceUrlService.search(paramMap, myResourceType, theRequest);

	// Multiple matches are only permitted when the server explicitly allows multi-delete
	if (resourceIds.size() > 1) {
		if (!myDaoConfig.isAllowMultipleDelete()) {
			throw new PreconditionFailedException(getContext().getLocalizer().getMessageSanitized(BaseHapiFhirDao.class, "transactionOperationWithMultipleMatchFailure", "DELETE", theUrl, resourceIds.size()));
		}
	}

	// _expunge=true routes to the fast delete-expunge path; otherwise the regular delete path
	if (paramMap.isDeleteExpunge()) {
		return deleteExpunge(theUrl, theRequest, resourceIds);
	} else {
		return deletePidList(theUrl, resourceIds, deleteConflicts, theRequest);
	}
}
/**
 * Performs the fast delete-expunge path for the given set of resource pids.
 * Requires both {@code isExpungeEnabled()} and {@code isDeleteExpungeEnabled()} to be set on the DaoConfig.
 *
 * @param theUrl         the conditional delete URL, used for interceptor notification and messages
 * @param theRequest     the active request details (may carry servlet context)
 * @param theResourceIds the pids of the resources to delete and expunge
 * @return the outcome, including expunged resource and entity counts
 * @throws MethodNotAllowedException if delete-expunge is not enabled on this server
 */
private DeleteMethodOutcome deleteExpunge(String theUrl, RequestDetails theRequest, Set<ResourcePersistentId> theResourceIds) {
	if (!myDaoConfig.isExpungeEnabled() || !myDaoConfig.isDeleteExpungeEnabled()) {
		throw new MethodNotAllowedException("_expunge is not enabled on this server");
	}
	// Fixed parameter name: was "theTheRequest" (typo); private method, so the rename is caller-safe
	return myDeleteExpungeService.expungeByResourcePids(theUrl, myResourceName, new SliceImpl<>(ResourcePersistentId.toLongList(theResourceIds)), theRequest);
}
@NotNull

View File

@ -27,12 +27,12 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@ -55,23 +55,24 @@ public class MatchResourceUrlService {
private IInterceptorBroadcaster myInterceptorBroadcaster;
/**
 * Resolves a conditional (match) URL into the set of persistent ids of the resources it matches.
 * The search is always executed synchronously.
 *
 * @param theMatchUrl     the conditional URL, e.g. {@code Patient?identifier=foo}
 * @param theResourceType the resource type the URL applies to
 * @param theRequest      the active request details
 * @return the persistent ids of all matching resources
 * @throws InvalidRequestException if the URL contains no search parameters
 */
public <R extends IBaseResource> Set<ResourcePersistentId> processMatchUrl(String theMatchUrl, Class<R> theResourceType, RequestDetails theRequest) {
	// (The rendered diff showed setLoadSynchronous(true) both before and after the empty-check;
	// the post-commit placement — once, after validation — is kept here. The unused StopWatch
	// local moved into search() and is dropped.)
	RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(theResourceType);
	SearchParameterMap paramMap = myMatchUrlService.translateMatchUrl(theMatchUrl, resourceDef);
	if (paramMap.isEmpty() && paramMap.getLastUpdated() == null) {
		throw new InvalidRequestException("Invalid match URL[" + theMatchUrl + "] - URL has no search parameters");
	}
	paramMap.setLoadSynchronous(true);
	return search(paramMap, theResourceType, theRequest);
}
public <R extends IBaseResource> Set<ResourcePersistentId> search(SearchParameterMap theParamMap, Class<R> theResourceType, RequestDetails theRequest) {
StopWatch sw = new StopWatch();
IFhirResourceDao<R> dao = myDaoRegistry.getResourceDao(theResourceType);
if (dao == null) {
throw new InternalErrorException("No DAO for resource type: " + theResourceType.getName());
}
Set<ResourcePersistentId> retVal = dao.searchForIds(paramMap, theRequest);
Set<ResourcePersistentId> retVal = dao.searchForIds(theParamMap, theRequest);
// Interceptor broadcast: JPA_PERFTRACE_INFO
if (JpaInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) {

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao.data;
* #L%
*/
import ca.uhn.fhir.empi.api.EmpiMatchResultEnum;
import ca.uhn.fhir.jpa.entity.EmpiLink;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
@ -32,4 +33,8 @@ public interface IEmpiLinkDao extends JpaRepository<EmpiLink, Long> {
/**
 * Deletes every EmpiLink that references the given pid as either the person or the target.
 *
 * @return the number of rows deleted
 */
@Modifying
@Query("DELETE FROM EmpiLink f WHERE myPersonPid = :pid OR myTargetPid = :pid")
int deleteWithAnyReferenceToPid(@Param("pid") Long thePid);

/**
 * Deletes every EmpiLink that references the given pid as either the person or the target,
 * excluding links whose match result equals the supplied value (e.g. to preserve REDIRECT links).
 *
 * @return the number of rows deleted
 */
@Modifying
@Query("DELETE FROM EmpiLink f WHERE (myPersonPid = :pid OR myTargetPid = :pid) AND myMatchResult <> :matchResult")
int deleteWithAnyReferenceToPidAndMatchResultNot(@Param("pid") Long thePid, @Param("matchResult")EmpiMatchResultEnum theMatchResult);
}

View File

@ -35,5 +35,8 @@ public interface IResourceLinkDao extends JpaRepository<ResourceLink, Long> {
void deleteByResourceId(@Param("resId") Long theResourcePid);
@Query("SELECT t FROM ResourceLink t WHERE t.mySourceResourcePid = :resId")
List<ResourceLink> findAllForResourceId(@Param("resId") Long thePatientId);
List<ResourceLink> findAllForSourceResourceId(@Param("resId") Long thePatientId);
/**
 * Returns every ResourceLink whose target resource pid appears in the supplied list.
 * Used by delete-expunge to detect inbound references that would block deletion.
 */
@Query("SELECT t FROM ResourceLink t WHERE t.myTargetResourcePid in :resIds")
List<ResourceLink> findWithTargetPidIn(@Param("resIds") List<Long> thePids);
}

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao.empi;
* #L%
*/
import ca.uhn.fhir.empi.api.EmpiMatchResultEnum;
import ca.uhn.fhir.jpa.dao.data.IEmpiLinkDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -50,4 +51,13 @@ public class EmpiLinkDeleteSvc {
}
return removed;
}
/**
 * Deletes all EMPI links that reference the given resource (as person or target), except links
 * whose match result is REDIRECT, which are preserved.
 * <p>
 * NOTE(review): the method name contains a doubled "WithWith" — almost certainly a typo, but it is
 * public API and renaming it would break callers.
 * </p>
 *
 * @param theResource the resource whose non-redirect links should be removed
 * @return the number of links removed
 */
public int deleteNonRedirectWithWithAnyReferenceTo(IBaseResource theResource) {
Long pid = myIdHelperService.getPidOrThrowException(theResource.getIdElement(), null);
int removed = myEmpiLinkDao.deleteWithAnyReferenceToPidAndMatchResultNot(pid, EmpiMatchResultEnum.REDIRECT);
if (removed > 0) {
ourLog.info("Removed {} non-redirect EMPI links with references to {}", removed, theResource.getIdElement().toVersionless());
}
return removed;
}
}

View File

@ -0,0 +1,171 @@
package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Slice;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
@Service
public class DeleteExpungeService {
	private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeService.class);

	@Autowired
	protected PlatformTransactionManager myPlatformTransactionManager;
	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	private EntityManager myEntityManager;
	@Autowired
	private FhirContext myFhirContext;
	@Autowired
	private PartitionRunner myPartitionRunner;
	@Autowired
	private ResourceTableFKProvider myResourceTableFKProvider;
	@Autowired
	private IResourceTableDao myResourceTableDao;
	@Autowired
	private IResourceLinkDao myResourceLinkDao;
	@Autowired
	private IInterceptorBroadcaster myInterceptorBroadcaster;
	@Autowired
	private DaoConfig myDaoConfig;

	/**
	 * Deletes and expunges the given resource pids and all rows in related tables that reference them.
	 * Fires {@code STORAGE_PRE_DELETE_EXPUNGE} before any work, validates referential integrity, then
	 * deletes in partitioned, parallel transactions.
	 *
	 * @param theUrl          the originating conditional delete URL (for hooks and outcome messages)
	 * @param theResourceName the resource type name being deleted
	 * @param thePids         the pids of the resources to delete and expunge
	 * @param theRequest      the active request details
	 * @return the outcome, carrying expunged resource and entity counts and an OperationOutcome
	 */
	public DeleteMethodOutcome expungeByResourcePids(String theUrl, String theResourceName, Slice<Long> thePids, RequestDetails theRequest) {
		StopWatch w = new StopWatch();
		if (thePids.isEmpty()) {
			return new DeleteMethodOutcome();
		}

		// Notify interceptors up front; a hook may veto the whole operation by throwing
		HookParams params = new HookParams()
			.add(RequestDetails.class, theRequest)
			.addIfMatchesType(ServletRequestDetails.class, theRequest)
			.add(String.class, theUrl);
		JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params);

		TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
		txTemplate.executeWithoutResult(t -> validateOkToDeleteAndExpunge(thePids));

		// FIX: was thePids.getNumber(), which is the Slice's page index, not the element count
		ourLog.info("Expunging all records linking to {} resources...", thePids.getNumberOfElements());
		AtomicLong expungedEntitiesCount = new AtomicLong();
		AtomicLong expungedResourcesCount = new AtomicLong();
		myPartitionRunner.runInPartitionedThreads(thePids, pidChunk -> deleteInTransaction(theResourceName, pidChunk, expungedResourcesCount, expungedEntitiesCount, theRequest));
		ourLog.info("Expunged a total of {} records", expungedEntitiesCount);

		// Build an OperationOutcome: "not found" warning when nothing matched, success info otherwise
		IBaseOperationOutcome oo;
		if (expungedResourcesCount.get() == 0) {
			oo = OperationOutcomeUtil.newInstance(myFhirContext);
			String message = myFhirContext.getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "unableToDeleteNotFound", theUrl);
			String severity = "warning";
			String code = "not-found";
			OperationOutcomeUtil.addIssue(myFhirContext, oo, severity, message, null, code);
		} else {
			oo = OperationOutcomeUtil.newInstance(myFhirContext);
			String message = myFhirContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulDeletes", expungedResourcesCount.get(), w.getMillis());
			String severity = "information";
			String code = "informational";
			OperationOutcomeUtil.addIssue(myFhirContext, oo, severity, message, null, code);
		}

		DeleteMethodOutcome retval = new DeleteMethodOutcome();
		retval.setExpungedResourcesCount(expungedResourcesCount.get());
		retval.setExpungedEntitiesCount(expungedEntitiesCount.get());
		retval.setOperationOutcome(oo);
		return retval;
	}

	/**
	 * Verifies that no resource OUTSIDE the deletion set references any resource inside it.
	 * Skipped entirely when referential integrity on delete is disabled.
	 *
	 * @throws InvalidRequestException describing the first conflicting reference found
	 */
	public void validateOkToDeleteAndExpunge(Slice<Long> theAllTargetPids) {
		if (!myDaoConfig.isEnforceReferentialIntegrityOnDelete()) {
			ourLog.info("Referential integrity on delete disabled. Skipping referential integrity check.");
			return;
		}

		// Collected across worker threads, hence the synchronized list
		List<ResourceLink> conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
		myPartitionRunner.runInPartitionedThreads(theAllTargetPids, someTargetPids -> findResourceLinksWithTargetPidIn(theAllTargetPids.getContent(), someTargetPids, conflictResourceLinks));

		if (conflictResourceLinks.isEmpty()) {
			return;
		}

		ResourceLink firstConflict = conflictResourceLinks.get(0);
		String sourceResourceId = firstConflict.getSourceResource().getIdDt().toVersionless().getValue();
		String targetResourceId = firstConflict.getTargetResource().getIdDt().toVersionless().getValue();
		throw new InvalidRequestException("DELETE with _expunge=true failed.  Unable to delete " +
			targetResourceId + " because " + sourceResourceId + " refers to it via the path " + firstConflict.getSourcePath());
	}

	/**
	 * Finds links targeting any pid in theSomeTargetPids whose SOURCE is not itself scheduled for
	 * deletion (i.e. not in theAllTargetPids); such links are true conflicts.
	 */
	private void findResourceLinksWithTargetPidIn(List<Long> theAllTargetPids, List<Long> theSomeTargetPids, List<ResourceLink> theConflictResourceLinks) {
		// We only need to find one conflict, so if we found one already in an earlier partition run, we can skip the rest of the searches
		if (theConflictResourceLinks.isEmpty()) {
			List<ResourceLink> conflictResourceLinks = myResourceLinkDao.findWithTargetPidIn(theSomeTargetPids).stream()
				// Filter out resource links for which we are planning to delete the source.
				// theAllTargetPids contains a list of all the pids we are planning to delete.  So we only want
				// to consider a link to be a conflict if the source of that link is not in theAllTargetPids.
				.filter(link -> !theAllTargetPids.contains(link.getSourceResourcePid()))
				.collect(Collectors.toList());

			// We do this in two steps to avoid lock contention on this synchronized list
			theConflictResourceLinks.addAll(conflictResourceLinks);
		}
	}

	/** Runs one pid chunk's deletion inside its own transaction. */
	private void deleteInTransaction(String theResourceName, List<Long> thePidChunk, AtomicLong theExpungedResourcesCount, AtomicLong theExpungedEntitiesCount, RequestDetails theRequest) {
		TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
		txTemplate.executeWithoutResult(t -> deleteAllRecordsLinkingTo(theResourceName, thePidChunk, theExpungedResourcesCount, theExpungedEntitiesCount, theRequest));
	}

	/**
	 * Deletes, for one pid chunk, every row in every related table that links to those resources,
	 * then the resource rows themselves. Fires STORAGE_PRE_DELETE_EXPUNGE_PID_LIST first so hooks
	 * can delete additional records (and bump theExpungedEntitiesCount accordingly).
	 */
	private void deleteAllRecordsLinkingTo(String theResourceName, List<Long> thePids, AtomicLong theExpungedResourcesCount, AtomicLong theExpungedEntitiesCount, RequestDetails theRequest) {
		HookParams params = new HookParams()
			.add(String.class, theResourceName)
			.add(List.class, thePids)
			.add(AtomicLong.class, theExpungedEntitiesCount)
			.add(RequestDetails.class, theRequest)
			.addIfMatchesType(ServletRequestDetails.class, theRequest);
		JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRE_DELETE_EXPUNGE_PID_LIST, params);

		// Render "[1, 2, 3]" as the SQL tuple "(1, 2, 3)". The values are Longs from the database,
		// so interpolating them into native SQL is injection-safe here.
		String pidListString = thePids.toString().replace("[", "(").replace("]", ")");
		List<ResourceForeignKey> resourceForeignKeys = myResourceTableFKProvider.getResourceForeignKeys();

		for (ResourceForeignKey resourceForeignKey : resourceForeignKeys) {
			deleteRecordsByColumn(pidListString, resourceForeignKey, theExpungedEntitiesCount);
		}

		// Lastly we need to delete records from the resource table all of these other tables link to:
		ResourceForeignKey resourceTablePk = new ResourceForeignKey("HFJ_RESOURCE", "RES_ID");
		int entitiesDeleted = deleteRecordsByColumn(pidListString, resourceTablePk, theExpungedEntitiesCount);
		theExpungedResourcesCount.addAndGet(entitiesDeleted);
	}

	/**
	 * Executes a native bulk DELETE on one table/column pair and adds the row count to the tally.
	 *
	 * @return the number of rows deleted from this table
	 */
	private int deleteRecordsByColumn(String thePidListString, ResourceForeignKey theResourceForeignKey, AtomicLong theExpungedEntitiesCount) {
		int entitesDeleted = myEntityManager.createNativeQuery("DELETE FROM " + theResourceForeignKey.table + " WHERE " + theResourceForeignKey.key + " IN " + thePidListString).executeUpdate();
		ourLog.info("Expunged {} records from {}", entitesDeleted, theResourceForeignKey.table);
		theExpungedEntitiesCount.addAndGet(entitesDeleted);
		return entitesDeleted;
	}
}

View File

@ -33,7 +33,15 @@ import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
@Service
@ -48,7 +56,7 @@ public class PartitionRunner {
myDaoConfig = theDaoConfig;
}
void runInPartitionedThreads(Slice<Long> theResourceIds, Consumer<List<Long>> partitionConsumer) {
public void runInPartitionedThreads(Slice<Long> theResourceIds, Consumer<List<Long>> partitionConsumer) {
List<Callable<Void>> callableTasks = buildCallableTasks(theResourceIds, partitionConsumer);
if (callableTasks.size() == 0) {
@ -89,18 +97,19 @@ public class PartitionRunner {
List<List<Long>> partitions = Lists.partition(theResourceIds.getContent(), myDaoConfig.getExpungeBatchSize());
for (List<Long> nextPartition : partitions) {
Callable<Void> callableTask = () -> {
ourLog.info("Expunging any search results pointing to {} resources", nextPartition.size());
partitionConsumer.accept(nextPartition);
return null;
};
retval.add(callableTask);
if (nextPartition.size() > 0) {
Callable<Void> callableTask = () -> {
ourLog.info("Expunging any search results pointing to {} resources", nextPartition.size());
partitionConsumer.accept(nextPartition);
return null;
};
retval.add(callableTask);
}
}
return retval;
}
private ExecutorService buildExecutor(int numberOfTasks) {
int threadCount = Math.min(numberOfTasks, myDaoConfig.getExpungeThreadCount());
assert (threadCount > 0);

View File

@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.dao.expunge;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
/**
 * Immutable value object naming a database table and the column in it that references
 * HFJ_RESOURCE.RES_ID. Used by delete-expunge to know which rows to bulk-delete.
 * Equality and hashing are by (table, key), so instances are safe as set/map members.
 */
public class ResourceForeignKey {
// Table name containing the referencing column
public final String table;
// Column in that table holding the resource pid
public final String key;

public ResourceForeignKey(String theTable, String theKey) {
table = theTable;
key = theKey;
}

@Override
public boolean equals(Object theO) {
if (this == theO) return true;

if (theO == null || getClass() != theO.getClass()) return false;

ResourceForeignKey that = (ResourceForeignKey) theO;

return new EqualsBuilder()
.append(table, that.table)
.append(key, that.key)
.isEquals();
}

@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(table)
.append(key)
.toHashCode();
}

@Override
public String toString() {
return new ToStringBuilder(this)
.append("table", table)
.append("key", key)
.toString();
}
}

View File

@ -0,0 +1,48 @@
package ca.uhn.fhir.jpa.dao.expunge;
import org.springframework.stereotype.Service;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;
@Service
public class ResourceTableFKProvider {
	/**
	 * Returns every (table, column) pair that references HFJ_RESOURCE.RES_ID and therefore must be
	 * cleared before resource rows can be deleted. Each pair is listed exactly once — delete-expunge
	 * issues one bulk DELETE per entry, so duplicates would execute redundant statements.
	 * (FIX: TRM_CODESYSTEM_VER/RES_ID and HFJ_RES_VER_PROV/RES_PID previously appeared twice.)
	 *
	 * @return a fresh, mutable list of foreign-key descriptors
	 */
	@Nonnull
	public List<ResourceForeignKey> getResourceForeignKeys() {
		List<ResourceForeignKey> retval = new ArrayList<>();

		// Add some secondary related records that don't have foreign keys
		retval.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));
		retval.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));

		// To find all the FKs that need to be included here, run the following SQL in the INFORMATION_SCHEMA:
		// SELECT FKTABLE_NAME, FKCOLUMN_NAME FROM CROSS_REFERENCES WHERE PKTABLE_NAME = 'HFJ_RESOURCE'
		retval.add(new ResourceForeignKey("HFJ_FORCED_ID", "RESOURCE_PID"));
		retval.add(new ResourceForeignKey("HFJ_IDX_CMP_STRING_UNIQ", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_RES_LINK", "SRC_RESOURCE_ID"));
		retval.add(new ResourceForeignKey("HFJ_RES_LINK", "TARGET_RESOURCE_ID"));
		retval.add(new ResourceForeignKey("HFJ_RES_PARAM_PRESENT", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_RES_TAG", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_RES_VER", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_SPIDX_COORDS", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_SPIDX_DATE", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_SPIDX_NUMBER", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_SPIDX_QUANTITY", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_SPIDX_STRING", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_SPIDX_TOKEN", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_SPIDX_URI", "RES_ID"));
		retval.add(new ResourceForeignKey("HFJ_SUBSCRIPTION_STATS", "RES_ID"));
		retval.add(new ResourceForeignKey("MPI_LINK", "PERSON_PID"));
		retval.add(new ResourceForeignKey("MPI_LINK", "TARGET_PID"));
		retval.add(new ResourceForeignKey("NPM_PACKAGE_VER", "BINARY_RES_ID"));
		retval.add(new ResourceForeignKey("NPM_PACKAGE_VER_RES", "BINARY_RES_ID"));
		retval.add(new ResourceForeignKey("TRM_CODESYSTEM", "RES_ID"));
		retval.add(new ResourceForeignKey("TRM_CONCEPT_MAP", "RES_ID"));
		retval.add(new ResourceForeignKey("TRM_VALUESET", "RES_ID"));
		return retval;
	}
}

View File

@ -184,6 +184,10 @@ public class EmpiLink {
return myMatchResult == EmpiMatchResultEnum.POSSIBLE_MATCH;
}
/**
 * @return true if this link's match result is {@code EmpiMatchResultEnum.REDIRECT}
 */
public boolean isRedirect() {
return myMatchResult == EmpiMatchResultEnum.REDIRECT;
}

/**
 * @return true if this link's match result is {@code EmpiMatchResultEnum.POSSIBLE_DUPLICATE}
 */
public boolean isPossibleDuplicate() {
return myMatchResult == EmpiMatchResultEnum.POSSIBLE_DUPLICATE;
}
@ -276,9 +280,15 @@ public class EmpiLink {
return this;
}
/**
 * Sets the resource type of this link's target. Fluent setter.
 *
 * @return this link, for chaining
 */
public EmpiLink setEmpiTargetType(String theEmpiTargetType) {
myEmpiTargetType = theEmpiTargetType;
return this;
}
@Override
public String toString() {
return new ToStringBuilder(this)
.append("myId", myId)
.append("myPersonPid", myPersonPid)
.append("myTargetPid", myTargetPid)
.append("myEmpiTargetType", myEmpiTargetType)
@ -293,8 +303,4 @@ public class EmpiLink {
/**
 * @return the resource type of this link's target
 */
public String getEmpiTargetType() {
return myEmpiTargetType;
}
public void setEmpiTargetType(String theEmpiTargetType) {
myEmpiTargetType = theEmpiTargetType;
}
}

View File

@ -380,4 +380,17 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
/**
 * Unit-test hook: replaces the code system version DAO. Deliberately package-private.
 */
void setCodeSystemVersionDaoForUnitTest(ITermCodeSystemVersionDao theCodeSystemVersionDao) {
myCodeSystemVersionDao = theCodeSystemVersionDao;
}
@Override
@VisibleForTesting
public void logQueueForUnitTest() {
// Dumps each deferred-queue state flag; intended only for diagnosing stuck/flaky unit tests
ourLog.info("isProcessDeferredPaused: {}", isProcessDeferredPaused());
ourLog.info("isDeferredConcepts: {}", isDeferredConcepts());
ourLog.info("isConceptLinksToSaveLater: {}", isConceptLinksToSaveLater());
ourLog.info("isDeferredValueSets: {}", isDeferredValueSets());
ourLog.info("isDeferredConceptMaps: {}", isDeferredConceptMaps());
ourLog.info("isDeferredCodeSystemDeletions: {}", isDeferredCodeSystemDeletions());
}
}

View File

@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.term.api;
*/
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@ -63,4 +62,5 @@ public interface ITermDeferredStorageSvc {
*/
void saveAllDeferred();
void logQueueForUnitTest();
}

View File

@ -0,0 +1,76 @@
package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Tests for DELETE with {@code _expunge=true}: conflicting references from outside
 * the deleted set must abort the operation, while references contained entirely
 * within the deleted set must not.
 */
class DeleteExpungeServiceTest extends BaseJpaR4Test {

	@Autowired
	DaoConfig myDaoConfig;

	@BeforeEach
	public void before() {
		// Delete-expunge requires all three of these settings to be enabled.
		myDaoConfig.setAllowMultipleDelete(true);
		myDaoConfig.setExpungeEnabled(true);
		myDaoConfig.setDeleteExpungeEnabled(true);
	}

	@AfterEach
	public void after() {
		// Restore the out-of-the-box defaults so later tests are unaffected.
		DaoConfig defaults = new DaoConfig();
		myDaoConfig.setAllowMultipleDelete(defaults.isAllowMultipleDelete());
		myDaoConfig.setExpungeEnabled(defaults.isExpungeEnabled());
		myDaoConfig.setDeleteExpungeEnabled(defaults.isDeleteExpungeEnabled());
	}

	@Test
	public void testDeleteExpungeThrowExceptionIfLink() {
		// An Organization referenced by a Patient outside the deleted set cannot be expunged.
		Organization organization = new Organization();
		organization.setName("FOO");
		IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();

		Patient patient = new Patient();
		patient.setManagingOrganization(new Reference(organizationId));
		IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();

		try {
			myOrganizationDao.deleteByUrl("Organization?" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
			fail();
		} catch (InvalidRequestException e) {
			// Note: assertEquals(expected, actual) — expected message comes first.
			assertEquals("DELETE with _expunge=true failed.  Unable to delete " + organizationId.toVersionless() + " because " + patientId.toVersionless() + " refers to it via the path Patient.managingOrganization", e.getMessage());
		}
	}

	@Test
	public void testDeleteExpungeNoThrowExceptionWhenLinkInSearchResults() {
		// Both the referencing (child) and referenced (mom) Patient fall inside the
		// deleted set, so the internal link must not block the expunge.
		Patient mom = new Patient();
		myPatientDao.create(mom).getId().toUnqualifiedVersionless();

		Patient child = new Patient();
		child.addLink().setOther(new Reference(mom));
		myPatientDao.create(child).getId().toUnqualifiedVersionless();

		DeleteMethodOutcome outcome = myPatientDao.deleteByUrl("Patient?" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
		assertEquals(2, outcome.getExpungedResourcesCount());
		assertEquals(7, outcome.getExpungedEntitiesCount());
	}
}

View File

@ -0,0 +1,41 @@
package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
/**
 * Guard test that {@code ResourceTableFKProvider} enumerates every foreign key
 * pointing at HFJ_RESOURCE, by comparing its list against the database's own
 * INFORMATION_SCHEMA (H2's CROSS_REFERENCES view).
 */
class ResourceTableFKProviderTest extends BaseJpaR4Test {

	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	protected EntityManager myEntityManager;
	@Autowired
	ResourceTableFKProvider myResourceTableFKProvider;

	@Test
	public void testWeHaveAllForeignKeys() {
		// Query the schema metadata for every FK whose target table is HFJ_RESOURCE.
		List<Object[]> result = myEntityManager.createNativeQuery("SELECT FKTABLE_NAME, FKCOLUMN_NAME FROM INFORMATION_SCHEMA.CROSS_REFERENCES WHERE PKTABLE_NAME = 'HFJ_RESOURCE'").getResultList();
		List<ResourceForeignKey> expected = result.stream().map(a -> new ResourceForeignKey(a[0].toString(), a[1].toString())).collect(Collectors.toList());
		// Add the extra FKs that are not available in the CROSS_REFERENCES table
		expected.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));
		expected.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));
		expected.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
		// If this assertion fails, it means hapi-fhir has added a new foreign-key dependency to HFJ_RESOURCE. To fix
		// the test, add the missing key to myResourceTableFKProvider.getResourceForeignKeys()
		assertThat(myResourceTableFKProvider.getResourceForeignKeys(), containsInAnyOrder(expected.toArray()));
	}
}

View File

@ -38,7 +38,6 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.HapiExtensions;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hamcrest.Matchers;
@ -458,7 +457,7 @@ public class PartitioningR4Test extends BaseJpaR4SystemTest {
assertEquals(myPartitionDate, dates.get(1).getPartitionId().getPartitionDate());
// HFJ_RES_LINK
List<ResourceLink> resourceLinks = myResourceLinkDao.findAllForResourceId(patientId);
List<ResourceLink> resourceLinks = myResourceLinkDao.findAllForSourceResourceId(patientId);
assertEquals(1, resourceLinks.size());
assertEquals(myPartitionId, resourceLinks.get(0).getPartitionId().getPartitionId().intValue());
assertEquals(myPartitionDate, resourceLinks.get(0).getPartitionId().getPartitionDate());
@ -542,7 +541,7 @@ public class PartitioningR4Test extends BaseJpaR4SystemTest {
assertEquals(myPartitionDate, dates.get(1).getPartitionId().getPartitionDate());
// HFJ_RES_LINK
List<ResourceLink> resourceLinks = myResourceLinkDao.findAllForResourceId(patientId);
List<ResourceLink> resourceLinks = myResourceLinkDao.findAllForSourceResourceId(patientId);
assertEquals(1, resourceLinks.size());
assertEquals(null, resourceLinks.get(0).getPartitionId().getPartitionId());
assertEquals(myPartitionDate, resourceLinks.get(0).getPartitionId().getPartitionDate());

View File

@ -2,7 +2,6 @@ package ca.uhn.fhir.jpa.delete;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DeleteConflict;
import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
@ -19,19 +18,18 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.fail;
public class DeleteConflictServiceR4Test extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(DeleteConflictServiceR4Test.class);
@Autowired
DaoConfig myDaoConfig;
private final DeleteConflictInterceptor myDeleteInterceptor = new DeleteConflictInterceptor();
private int myInterceptorDeleteCount;

View File

@ -67,6 +67,8 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes
public void before() throws Exception {
super.before();
myDaoConfig.setAllowMultipleDelete(true);
myDaoConfig.setExpungeEnabled(true);
myDaoConfig.setDeleteExpungeEnabled(true);
}
/**
@ -604,7 +606,7 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes
obs.getSubject().setReferenceElement(patientId);
myClient.create().resource(obs).execute();
// Allow any deletes, but don't allow cascade
// Allow any deletes and allow cascade
ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
@ -1155,5 +1157,59 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes
}
}
	@Test
	public void testDeleteExpungeBlocked() {
		// Create a Patient to target with the delete-expunge request
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100");
		patient.addName().setFamily("Tester").addGiven("Siobhan");
		myClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();

		// Allow any deletes, but don't allow expunge (no delete().onExpunge() rule),
		// so a DELETE with _expunge=true must be rejected by the interceptor
		ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
			@Override
			public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
				return new RuleBuilder()
					.allow().delete().allResources().withAnyId().andThen()
					.build();
			}
		});

		try {
			myClient
				.delete()
				.resourceConditionalByUrl("Patient?name=Siobhan&_expunge=true")
				.execute();
			fail();
		} catch (ForbiddenOperationException e) {
			// good
		}
	}
// FIXME KHS
@Test
public void testDeleteExpungeAllowed() {
// Create Patient, and Observation that refers to it
Patient patient = new Patient();
patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100");
patient.addName().setFamily("Tester").addGiven("Raghad");
myClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
// Allow deletes and allow expunge
ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().delete().allResources().withAnyId().andThen()
.allow().delete().onExpunge().allResources().withAnyId().andThen()
.build();
}
});
myClient
.delete()
.resourceConditionalByUrl("Patient?name=Siobhan&_expunge=true")
.execute();
}
}

View File

@ -12,7 +12,6 @@ import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.Coding;
@ -20,7 +19,6 @@ import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
@ -34,10 +32,10 @@ import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.leftPad;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
public class TerminologySvcDeltaR4Test extends BaseJpaR4Test {
@ -396,9 +394,15 @@ public class TerminologySvcDeltaR4Test extends BaseJpaR4Test {
myTermCodeSystemStorageSvc.applyDeltaCodeSystemsAdd("http://foo/cs", delta);
assertFalse(myTermDeferredStorageSvc.isStorageQueueEmpty());
while (!myTermDeferredStorageSvc.isStorageQueueEmpty()) {
int counter = 0;
while (!myTermDeferredStorageSvc.isStorageQueueEmpty() && ++counter < 10000) {
myTermDeferredStorageSvc.saveDeferred();
}
if (counter >= 10000) {
ourLog.error("Failed to empty myTermDeferredStorageSvc storage queue after 10000 attempts");
myTermDeferredStorageSvc.logQueueForUnitTest();
fail("myTermDeferredStorageSvc.saveDeferred() did not empty myTermDeferredStorageSvc storage queue.");
}
List<String> expectedHierarchy = new ArrayList<>();
for (int i = 0; i < nestedDepth + 1; i++) {

View File

@ -38,10 +38,12 @@ import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
public class EmpiLinkDaoSvc {
@ -87,10 +89,10 @@ public class EmpiLinkDaoSvc {
if (oExisting.isPresent()) {
return oExisting.get();
} else {
EmpiLink empiLink = myEmpiLinkFactory.newEmpiLink();
empiLink.setPersonPid(thePersonPid);
empiLink.setTargetPid(theResourcePid);
return empiLink;
EmpiLink newLink = myEmpiLinkFactory.newEmpiLink();
newLink.setPersonPid(thePersonPid);
newLink.setTargetPid(theResourcePid);
return newLink;
}
}
@ -192,8 +194,8 @@ public class EmpiLinkDaoSvc {
if (pid == null) {
return Optional.empty();
}
EmpiLink empiLink = myEmpiLinkFactory.newEmpiLink().setTargetPid(pid);
Example<EmpiLink> example = Example.of(empiLink);
EmpiLink exampleLink = myEmpiLinkFactory.newEmpiLink().setTargetPid(pid);
Example<EmpiLink> example = Example.of(exampleLink);
return myEmpiLinkDao.findOne(example);
}
@ -220,8 +222,8 @@ public class EmpiLinkDaoSvc {
if (pid == null) {
return Collections.emptyList();
}
EmpiLink empiLink = myEmpiLinkFactory.newEmpiLink().setPersonPid(pid);
Example<EmpiLink> example = Example.of(empiLink);
EmpiLink exampleLink = myEmpiLinkFactory.newEmpiLink().setPersonPid(pid);
Example<EmpiLink> example = Example.of(exampleLink);
return myEmpiLinkDao.findAll(example);
}
@ -237,11 +239,12 @@ public class EmpiLinkDaoSvc {
}
private List<Long> deleteEmpiLinksAndReturnPersonPids(List<EmpiLink> theLinks) {
List<Long> collect = theLinks.stream().map(EmpiLink::getPersonPid).distinct().collect(Collectors.toList());
Set<Long> persons = theLinks.stream().map(EmpiLink::getPersonPid).collect(Collectors.toSet());
persons.addAll(theLinks.stream().filter(link -> "Person".equals(link.getEmpiTargetType())).map(EmpiLink::getTargetPid).collect(Collectors.toSet()));
ourLog.info("Deleting {} EMPI link records...", theLinks.size());
myEmpiLinkDao.deleteAll(theLinks);
ourLog.info("{} EMPI link records deleted", theLinks.size());
return collect;
return new ArrayList<>(persons);
}
/**
@ -300,8 +303,8 @@ public class EmpiLinkDaoSvc {
if (pid == null) {
return Collections.emptyList();
}
EmpiLink empiLink = myEmpiLinkFactory.newEmpiLink().setTargetPid(pid);
Example<EmpiLink> example = Example.of(empiLink);
EmpiLink exampleLink = myEmpiLinkFactory.newEmpiLink().setTargetPid(pid);
Example<EmpiLink> example = Example.of(exampleLink);
return myEmpiLinkDao.findAll(example);
}
@ -313,5 +316,4 @@ public class EmpiLinkDaoSvc {
public EmpiLink newEmpiLink() {
return myEmpiLinkFactory.newEmpiLink();
}
}

View File

@ -88,7 +88,10 @@ public class EmpiStorageInterceptor implements IEmpiStorageInterceptor {
if (EmpiUtil.isEmpiManagedPerson(myFhirContext, theNewResource) &&
myPersonHelper.isDeactivated(theNewResource)) {
ourLog.debug("Deleting empi links to deactivated Person {}", theNewResource.getIdElement().toUnqualifiedVersionless());
myEmpiLinkDeleteSvc.deleteWithAnyReferenceTo(theNewResource);
int deleted = myEmpiLinkDeleteSvc.deleteNonRedirectWithWithAnyReferenceTo(theNewResource);
if (deleted > 0) {
ourLog.debug("Deleted {} empi links", deleted);
}
}
if (isInternalRequest(theRequestDetails)) {

View File

@ -23,9 +23,11 @@ package ca.uhn.fhir.jpa.empi.svc;
import ca.uhn.fhir.empi.api.IEmpiExpungeSvc;
import ca.uhn.fhir.empi.log.Logs;
import ca.uhn.fhir.empi.util.EmpiUtil;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.empi.dao.EmpiLinkDaoSvc;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
@ -48,14 +50,13 @@ public class EmpiClearSvcImpl implements IEmpiExpungeSvc {
}
@Override
public long expungeAllEmpiLinksOfTargetType(String theResourceType) {
public long expungeAllEmpiLinksOfTargetType(String theResourceType, ServletRequestDetails theRequestDetails) {
throwExceptionIfInvalidTargetType(theResourceType);
ourLog.info("Clearing all EMPI Links for resource type {}...", theResourceType);
List<Long> longs = myEmpiLinkDaoSvc.deleteAllEmpiLinksOfTypeAndReturnPersonPids(theResourceType);
myEmpiPersonDeletingSvcImpl.deletePersonResourcesAndHandleConflicts(longs);
myEmpiPersonDeletingSvcImpl.expungeHistoricalAndCurrentVersionsOfIds(longs);
ourLog.info("EMPI clear operation complete. Removed {} EMPI links.", longs.size());
return longs.size();
List<Long> personPids = myEmpiLinkDaoSvc.deleteAllEmpiLinksOfTypeAndReturnPersonPids(theResourceType);
DeleteMethodOutcome deleteOutcome = myEmpiPersonDeletingSvcImpl.expungePersonPids(personPids, theRequestDetails);
ourLog.info("EMPI clear operation complete. Removed {} EMPI links and {} Person resources.", personPids.size(), deleteOutcome.getExpungedResourcesCount());
return personPids.size();
}
private void throwExceptionIfInvalidTargetType(String theResourceType) {
@ -65,12 +66,11 @@ public class EmpiClearSvcImpl implements IEmpiExpungeSvc {
}
@Override
public long removeAllEmpiLinks() {
public long expungeAllEmpiLinks(ServletRequestDetails theRequestDetails) {
ourLog.info("Clearing all EMPI Links...");
List<Long> personPids = myEmpiLinkDaoSvc.deleteAllEmpiLinksAndReturnPersonPids();
myEmpiPersonDeletingSvcImpl.deletePersonResourcesAndHandleConflicts(personPids);
myEmpiPersonDeletingSvcImpl.expungeHistoricalAndCurrentVersionsOfIds(personPids);
ourLog.info("EMPI clear operation complete. Removed {} EMPI links.", personPids.size());
DeleteMethodOutcome deleteOutcome = myEmpiPersonDeletingSvcImpl.expungePersonPids(personPids, theRequestDetails);
ourLog.info("EMPI clear operation complete. Removed {} EMPI links and expunged {} Person resources.", personPids.size(), deleteOutcome.getExpungedResourcesCount());
return personPids.size();
}
}

View File

@ -110,7 +110,7 @@ public class EmpiLinkSvcImpl implements IEmpiLinkSvc {
List<EmpiLink> empiLinks = myEmpiLinkDaoSvc.findEmpiLinksByPerson(thePersonResource);
List<IBaseBackboneElement> newLinks = empiLinks.stream()
.filter(link -> link.isMatch() || link.isPossibleMatch())
.filter(link -> link.isMatch() || link.isPossibleMatch() || link.isRedirect())
.map(this::personLinkFromEmpiLink)
.collect(Collectors.toList());
myPersonHelper.setLinks(thePersonResource, newLinks);
@ -119,6 +119,7 @@ public class EmpiLinkSvcImpl implements IEmpiLinkSvc {
} else if (newLinks.size() < origLinkCount) {
log(theEmpiTransactionContext, thePersonResource.getIdElement().toVersionless() + " links decreased from " + origLinkCount + " to " + newLinks.size());
}
}
@Override

View File

@ -22,20 +22,15 @@ package ca.uhn.fhir.jpa.empi.svc;
import ca.uhn.fhir.empi.log.Logs;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DeleteConflict;
import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.SliceImpl;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
@ -52,57 +47,10 @@ public class EmpiPersonDeletingSvc {
private DaoRegistry myDaoRegistry;
@Autowired
private ExpungeService myExpungeService;
@Autowired
DeleteExpungeService myDeleteExpungeService;
/**
* Function which will delete all resources by their PIDs, and also delete any resources that were undeletable due to
* VersionConflictException
*
* @param theResourcePids
*/
@Transactional
public void deletePersonResourcesAndHandleConflicts(List<Long> theResourcePids) {
List<ResourcePersistentId> resourceIds = ResourcePersistentId.fromLongList(theResourcePids);
ourLog.info("Deleting {} Person resources...", resourceIds.size());
DeleteConflictList
deleteConflictList = new DeleteConflictList();
IFhirResourceDao<?> resourceDao = myDaoRegistry.getResourceDao("Person");
resourceDao.deletePidList(ProviderConstants.EMPI_CLEAR, resourceIds, deleteConflictList, null);
IFhirResourceDao personDao = myDaoRegistry.getResourceDao("Person");
int batchCount = 0;
while (!deleteConflictList.isEmpty()) {
deleteConflictBatch(deleteConflictList, personDao);
batchCount += 1;
if (batchCount > MAXIMUM_DELETE_ATTEMPTS) {
throw new IllegalStateException("Person deletion seems to have entered an infinite loop. Aborting");
}
}
ourLog.info("Deleted {} Person resources in {} batches", resourceIds.size(), batchCount);
}
/**
* Use the expunge service to expunge all historical and current versions of the resources associated to the PIDs.
*/
public void expungeHistoricalAndCurrentVersionsOfIds(List<Long> theLongs) {
ourLog.info("Expunging historical versions of {} Person resources...", theLongs.size());
ExpungeOptions options = new ExpungeOptions();
options.setExpungeDeletedResources(true);
options.setExpungeOldVersions(true);
theLongs
.forEach(personId -> myExpungeService.expunge("Person", personId, null, options, null));
ourLog.info("Expunged historical versions of {} Person resources", theLongs.size());
}
private void deleteConflictBatch(DeleteConflictList theDcl, IFhirResourceDao<IBaseResource> theDao) {
DeleteConflictList newBatch = new DeleteConflictList();
TransactionDetails transactionDetails = new TransactionDetails();
for (DeleteConflict next : theDcl) {
IdDt nextSource = next.getSourceId();
ourLog.info("Have delete conflict {} - Cascading delete", nextSource);
theDao.delete(nextSource.toVersionless(), newBatch, null, transactionDetails);
}
theDcl.removeAll();
theDcl.addAll(newBatch);
public DeleteMethodOutcome expungePersonPids(List<Long> thePersonPids, ServletRequestDetails theRequestDetails) {
return myDeleteExpungeService.expungeByResourcePids(ProviderConstants.EMPI_CLEAR, "Person", new SliceImpl<>(thePersonPids), theRequestDetails);
}
}

View File

@ -62,7 +62,6 @@ public class EmpiPersonMergerSvcImpl implements IEmpiPersonMergerSvc {
myPersonHelper.mergePersonFields(theFromPerson, theToPerson);
mergeLinks(theFromPerson, theToPerson, toPersonPid, theEmpiTransactionContext);
refreshLinksAndUpdatePerson(theToPerson, theEmpiTransactionContext);
Long fromPersonPid = myIdHelperService.getPidOrThrowException(theFromPerson);
@ -75,11 +74,11 @@ public class EmpiPersonMergerSvcImpl implements IEmpiPersonMergerSvc {
return theToPerson;
}
private void addMergeLink(Long theFromPersonPid, Long theToPersonPid) {
EmpiLink empiLink = myEmpiLinkDaoSvc.newEmpiLink()
.setPersonPid(theFromPersonPid)
.setTargetPid(theToPersonPid)
.setMatchResult(EmpiMatchResultEnum.MATCH)
private void addMergeLink(Long theDeactivatedPersonPid, Long theActivePersonPid) {
EmpiLink empiLink = myEmpiLinkDaoSvc.getOrCreateEmpiLinkByPersonPidAndTargetPid(theDeactivatedPersonPid, theActivePersonPid);
empiLink
.setEmpiTargetType("Person")
.setMatchResult(EmpiMatchResultEnum.REDIRECT)
.setLinkSource(EmpiLinkSourceEnum.MANUAL);
myEmpiLinkDaoSvc.save(empiLink);
}
@ -90,25 +89,25 @@ public class EmpiPersonMergerSvcImpl implements IEmpiPersonMergerSvc {
}
private void mergeLinks(IAnyResource theFromPerson, IAnyResource theToPerson, Long theToPersonPid, EmpiTransactionContext theEmpiTransactionContext) {
List<EmpiLink> incomingLinks = myEmpiLinkDaoSvc.findEmpiLinksByPerson(theFromPerson);
List<EmpiLink> origLinks = myEmpiLinkDaoSvc.findEmpiLinksByPerson(theToPerson);
List<EmpiLink> fromLinks = myEmpiLinkDaoSvc.findEmpiLinksByPerson(theFromPerson);
List<EmpiLink> toLinks = myEmpiLinkDaoSvc.findEmpiLinksByPerson(theToPerson);
// For each incomingLink, either ignore it, move it, or replace the original one
for (EmpiLink incomingLink : incomingLinks) {
Optional<EmpiLink> optionalOrigLink = findLinkWithMatchingTarget(origLinks, incomingLink);
if (optionalOrigLink.isPresent()) {
for (EmpiLink fromLink : fromLinks) {
Optional<EmpiLink> optionalToLink = findFirstLinkWithMatchingTarget(toLinks, fromLink);
if (optionalToLink.isPresent()) {
// The original links already contain this target, so move it over to the toPerson
EmpiLink origLink = optionalOrigLink.get();
if (incomingLink.isManual()) {
switch (origLink.getLinkSource()) {
EmpiLink toLink = optionalToLink.get();
if (fromLink.isManual()) {
switch (toLink.getLinkSource()) {
case AUTO:
ourLog.trace("MANUAL overrides AUT0. Deleting link {}", origLink);
myEmpiLinkDaoSvc.deleteLink(origLink);
ourLog.trace("MANUAL overrides AUT0. Deleting link {}", toLink);
myEmpiLinkDaoSvc.deleteLink(toLink);
break;
case MANUAL:
if (incomingLink.getMatchResult() != origLink.getMatchResult()) {
throw new InvalidRequestException("A MANUAL " + incomingLink.getMatchResult() + " link may not be merged into a MANUAL " + origLink.getMatchResult() + " link for the same target");
if (fromLink.getMatchResult() != toLink.getMatchResult()) {
throw new InvalidRequestException("A MANUAL " + fromLink.getMatchResult() + " link may not be merged into a MANUAL " + toLink.getMatchResult() + " link for the same target");
}
}
} else {
@ -117,13 +116,13 @@ public class EmpiPersonMergerSvcImpl implements IEmpiPersonMergerSvc {
}
}
// The original links didn't contain this target, so move it over to the toPerson
incomingLink.setPersonPid(theToPersonPid);
ourLog.trace("Saving link {}", incomingLink);
myEmpiLinkDaoSvc.save(incomingLink);
fromLink.setPersonPid(theToPersonPid);
ourLog.trace("Saving link {}", fromLink);
myEmpiLinkDaoSvc.save(fromLink);
}
}
private Optional<EmpiLink> findLinkWithMatchingTarget(List<EmpiLink> theEmpiLinks, EmpiLink theLinkWithTargetToMatch) {
private Optional<EmpiLink> findFirstLinkWithMatchingTarget(List<EmpiLink> theEmpiLinks, EmpiLink theLinkWithTargetToMatch) {
return theEmpiLinks.stream()
.filter(empiLink -> empiLink.getTargetPid().equals(theLinkWithTargetToMatch.getTargetPid()))
.findFirst();

View File

@ -8,6 +8,7 @@ import ca.uhn.fhir.empi.api.IEmpiSettings;
import ca.uhn.fhir.empi.model.EmpiTransactionContext;
import ca.uhn.fhir.empi.rules.svc.EmpiResourceMatcherSvc;
import ca.uhn.fhir.empi.util.EIDHelper;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.data.IEmpiLinkDao;
@ -43,6 +44,7 @@ import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Person;
import org.hl7.fhir.r4.model.Practitioner;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
@ -103,8 +105,15 @@ abstract public class BaseEmpiR4Test extends BaseJpaR4Test {
EmpiSearchParameterLoader myEmpiSearchParameterLoader;
@Autowired
SearchParamRegistryImpl mySearchParamRegistry;
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
protected ServletRequestDetails myRequestDetails = new ServletRequestDetails(null);
protected ServletRequestDetails myRequestDetails;
@BeforeEach
public void beforeSetRequestDetails() {
myRequestDetails = new ServletRequestDetails(myInterceptorBroadcaster);
}
@Override
@AfterEach
@ -159,6 +168,7 @@ abstract public class BaseEmpiR4Test extends BaseJpaR4Test {
patient.setId(outcome.getId());
return patient;
}
@Nonnull
protected Practitioner createPractitioner(Practitioner thePractitioner) {
//Note that since our empi-rules block on active=true, all patients must be active.

View File

@ -11,8 +11,8 @@ public class EmpiEnumTest {
public void empiEnumOrdinals() {
// This test is here to enforce that new values in these enums are always added to the end
assertEquals(5, EmpiMatchResultEnum.values().length);
assertEquals(EmpiMatchResultEnum.GOLDEN_RECORD, EmpiMatchResultEnum.values()[EmpiMatchResultEnum.values().length - 1]);
assertEquals(6, EmpiMatchResultEnum.values().length);
assertEquals(EmpiMatchResultEnum.REDIRECT, EmpiMatchResultEnum.values()[EmpiMatchResultEnum.values().length - 1]);
assertEquals(2, EmpiLinkSourceEnum.values().length);
assertEquals(EmpiLinkSourceEnum.MANUAL, EmpiLinkSourceEnum.values()[EmpiLinkSourceEnum.values().length - 1]);

View File

@ -2,13 +2,17 @@ package ca.uhn.fhir.jpa.empi.provider;
import ca.uhn.fhir.empi.api.EmpiLinkSourceEnum;
import ca.uhn.fhir.empi.api.EmpiMatchResultEnum;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.entity.EmpiLink;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Person;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ -19,6 +23,9 @@ public abstract class BaseLinkR4Test extends BaseProviderR4Test {
protected static final StringType POSSIBLE_MATCH_RESULT = new StringType(EmpiMatchResultEnum.POSSIBLE_MATCH.name());
protected static final StringType POSSIBLE_DUPLICATE_RESULT = new StringType(EmpiMatchResultEnum.POSSIBLE_DUPLICATE.name());
@Autowired
DaoConfig myDaoConfig;
protected Patient myPatient;
protected Person myPerson;
protected EmpiLink myLink;
@ -43,6 +50,13 @@ public abstract class BaseLinkR4Test extends BaseProviderR4Test {
myLink.setMatchResult(EmpiMatchResultEnum.POSSIBLE_MATCH);
saveLink(myLink);
assertEquals(EmpiLinkSourceEnum.AUTO, myLink.getLinkSource());
myDaoConfig.setExpungeEnabled(true);
}
@AfterEach
public void after() throws IOException {
myDaoConfig.setExpungeEnabled(new DaoConfig().isExpungeEnabled());
super.after();
}
@Nonnull
@ -50,10 +64,8 @@ public abstract class BaseLinkR4Test extends BaseProviderR4Test {
return myEmpiLinkDaoSvc.findEmpiLinkByTarget(myPatient).get();
}
@Nonnull
protected List<EmpiLink> getPatientLinks() {
return myEmpiLinkDaoSvc.findEmpiLinksByTarget(myPatient);
}
}

View File

@ -52,21 +52,21 @@ public class EmpiProviderBatchR4Test extends BaseLinkR4Test {
@Test
public void testBatchRunOnAllPractitioners() throws InterruptedException {
StringType criteria = null;
myEmpiProviderR4.clearEmpiLinks(null);
myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
afterEmpiLatch.runWithExpectedCount(1, () -> myEmpiProviderR4.empiBatchPractitionerType(criteria, null));
assertLinkCount(1);
}
@Test
public void testBatchRunOnSpecificPractitioner() throws InterruptedException {
	// Clear existing EMPI links, then batch-run EMPI on a single Practitioner
	// instance and expect exactly one link to be rebuilt.
	// (Removed the stale duplicated clearEmpiLinks(null) call whose single-argument
	// signature predates the RequestDetails refactor.)
	myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
	afterEmpiLatch.runWithExpectedCount(1, () -> myEmpiProviderR4.empiBatchPractitionerInstance(myPractitioner.getIdElement(), null));
	assertLinkCount(1);
}
@Test
public void testBatchRunOnNonExistentSpecificPractitioner() {
myEmpiProviderR4.clearEmpiLinks(null);
myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
try {
myEmpiProviderR4.empiBatchPractitionerInstance(new IdType("Practitioner/999"), null);
fail();
@ -77,7 +77,7 @@ public class EmpiProviderBatchR4Test extends BaseLinkR4Test {
public void testBatchRunOnAllPatients() throws InterruptedException {
	// Two links exist from test setup; clear them, then batch-run over all
	// Patients and expect exactly one link to be rebuilt.
	assertLinkCount(2);
	StringType criteria = null;
	// Removed the stale duplicated single-argument clearEmpiLinks(null) call;
	// the provider signature now requires the RequestDetails argument.
	myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
	afterEmpiLatch.runWithExpectedCount(1, () -> myEmpiProviderR4.empiBatchPatientType(criteria, null));
	assertLinkCount(1);
}
@ -85,7 +85,7 @@ public class EmpiProviderBatchR4Test extends BaseLinkR4Test {
@Test
public void testBatchRunOnSpecificPatient() throws InterruptedException {
	// Two links exist from test setup; clear them, then batch-run EMPI on a
	// single Patient instance and expect exactly one link to be rebuilt.
	assertLinkCount(2);
	// Removed the stale duplicated single-argument clearEmpiLinks(null) call;
	// only the current two-argument form is kept.
	myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
	afterEmpiLatch.runWithExpectedCount(1, () -> myEmpiProviderR4.empiBatchPatientInstance(myPatient.getIdElement(), null));
	assertLinkCount(1);
}
@ -93,7 +93,7 @@ public class EmpiProviderBatchR4Test extends BaseLinkR4Test {
@Test
public void testBatchRunOnNonExistentSpecificPatient() {
assertLinkCount(2);
myEmpiProviderR4.clearEmpiLinks(null);
myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
try {
myEmpiProviderR4.empiBatchPatientInstance(new IdType("Patient/999"), null);
fail();
@ -104,7 +104,7 @@ public class EmpiProviderBatchR4Test extends BaseLinkR4Test {
public void testBatchRunOnAllTypes() throws InterruptedException {
assertLinkCount(2);
StringType criteria = new StringType("");
myEmpiProviderR4.clearEmpiLinks(null);
myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
afterEmpiLatch.runWithExpectedCount(2, () -> {
myEmpiProviderR4.empiBatchOnAllTargets(criteria, null);
});
@ -115,7 +115,7 @@ public class EmpiProviderBatchR4Test extends BaseLinkR4Test {
public void testBatchRunOnAllTypesWithInvalidCriteria() {
assertLinkCount(2);
StringType criteria = new StringType("death-date=2020-06-01");
myEmpiProviderR4.clearEmpiLinks(null);
myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
try {
myEmpiProviderR4.empiBatchPractitionerType(criteria, null);

View File

@ -27,8 +27,6 @@ import static org.hamcrest.Matchers.nullValue;
import static org.junit.jupiter.api.Assertions.fail;
public class EmpiProviderClearLinkR4Test extends BaseLinkR4Test {
protected Practitioner myPractitioner;
protected StringType myPractitionerId;
protected Person myPractitionerPerson;
@ -46,7 +44,7 @@ public class EmpiProviderClearLinkR4Test extends BaseLinkR4Test {
@Test
public void testClearAllLinks() {
	// Setup creates two links; clearing with a null target type must remove all of them.
	assertLinkCount(2);
	// Removed the stale duplicated single-argument clearEmpiLinks(null) call
	// left over from before the RequestDetails parameter was added.
	myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
	assertNoLinksExist();
}
@ -68,7 +66,7 @@ public class EmpiProviderClearLinkR4Test extends BaseLinkR4Test {
assertLinkCount(2);
Person read = myPersonDao.read(new IdDt(myPersonId.getValueAsString()).toVersionless());
assertThat(read, is(notNullValue()));
myEmpiProviderR4.clearEmpiLinks(new StringType("Patient"));
myEmpiProviderR4.clearEmpiLinks(new StringType("Patient"), myRequestDetails);
assertNoPatientLinksExist();
try {
myPersonDao.read(new IdDt(myPersonId.getValueAsString()).toVersionless());
@ -85,7 +83,7 @@ public class EmpiProviderClearLinkR4Test extends BaseLinkR4Test {
Patient patientAndUpdateLinks = createPatientAndUpdateLinks(buildJanePatient());
Person person = getPersonFromTarget(patientAndUpdateLinks);
assertThat(person, is(notNullValue()));
myEmpiProviderR4.clearEmpiLinks(null);
myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
assertNoPatientLinksExist();
person = getPersonFromTarget(patientAndUpdateLinks);
assertThat(person, is(nullValue()));
@ -102,7 +100,7 @@ public class EmpiProviderClearLinkR4Test extends BaseLinkR4Test {
linkPersons(personFromTarget, personFromTarget2);
//SUT
myEmpiProviderR4.clearEmpiLinks(null);
myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
assertNoPatientLinksExist();
IBundleProvider search = myPersonDao.search(new SearchParameterMap().setLoadSynchronous(true));
@ -125,7 +123,7 @@ public class EmpiProviderClearLinkR4Test extends BaseLinkR4Test {
linkPersons(personFromTarget2, personFromTarget);
//SUT
Parameters parameters = myEmpiProviderR4.clearEmpiLinks(null);
Parameters parameters = myEmpiProviderR4.clearEmpiLinks(null, myRequestDetails);
assertNoPatientLinksExist();
IBundleProvider search = myPersonDao.search(new SearchParameterMap().setLoadSynchronous(true));
assertThat(search.size(), is(equalTo(0)));
@ -145,7 +143,7 @@ public class EmpiProviderClearLinkR4Test extends BaseLinkR4Test {
assertLinkCount(2);
Person read = myPersonDao.read(new IdDt(myPractitionerPersonId.getValueAsString()).toVersionless());
assertThat(read, is(notNullValue()));
myEmpiProviderR4.clearEmpiLinks(new StringType("Practitioner"));
myEmpiProviderR4.clearEmpiLinks(new StringType("Practitioner"), myRequestDetails);
assertNoPractitionerLinksExist();
try {
myPersonDao.read(new IdDt(myPractitionerPersonId.getValueAsString()).toVersionless());
@ -156,7 +154,7 @@ public class EmpiProviderClearLinkR4Test extends BaseLinkR4Test {
@Test
public void testClearInvalidTargetType() {
try {
myEmpiProviderR4.clearEmpiLinks(new StringType("Observation"));
myEmpiProviderR4.clearEmpiLinks(new StringType("Observation"), myRequestDetails);
fail();
} catch (InvalidRequestException e) {
assertThat(e.getMessage(), is(equalTo("$empi-clear does not support resource type: Observation")));

View File

@ -51,7 +51,7 @@ public class EmpiProviderMergePersonsR4Test extends BaseProviderR4Test {
List<Person.PersonLinkComponent> links = fromPerson.getLink();
assertThat(links, hasSize(1));
assertThat(links.get(0).getTarget().getReference(), is (myToPerson.getIdElement().toUnqualifiedVersionless().getValue()));
assertThat(links.get(0).getAssurance(), is (AssuranceLevelUtil.getAssuranceLevel(EmpiMatchResultEnum.MATCH, EmpiLinkSourceEnum.MANUAL).toR4()));
assertThat(links.get(0).getAssurance(), is (AssuranceLevelUtil.getAssuranceLevel(EmpiMatchResultEnum.REDIRECT, EmpiLinkSourceEnum.MANUAL).toR4()));
}
@Test

View File

@ -24,11 +24,13 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Example;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
@ -101,7 +103,16 @@ public class EmpiPersonMergerSvcTest extends BaseEmpiR4Test {
}
private Person mergePersons() {
	// Removed the stale pre-refactor "return (Person) ..." line that was
	// duplicated ahead of this body; the method now only performs the
	// instrumented merge below.
	// No REDIRECT link may exist before the merge...
	assertEquals(0, redirectLinkCount());
	Person retval = (Person) myEmpiPersonMergerSvc.mergePersons(myFromPerson, myToPerson, createEmpiContext());
	// ...and the merge must leave exactly one REDIRECT link recording the
	// from-person -> to-person relationship.
	assertEquals(1, redirectLinkCount());
	return retval;
}
private int redirectLinkCount() {
	// Query-by-example probe: matches every stored link whose match result is REDIRECT.
	EmpiLink probe = new EmpiLink().setMatchResult(EmpiMatchResultEnum.REDIRECT);
	return myEmpiLinkDao.findAll(Example.of(probe)).size();
}
private EmpiTransactionContext createEmpiContext() {
@ -112,9 +123,20 @@ public class EmpiPersonMergerSvcTest extends BaseEmpiR4Test {
public void mergeRemovesPossibleDuplicatesLink() {
	// Seed a single auto-created POSSIBLE_DUPLICATE link between the two persons.
	EmpiLink empiLink = myEmpiLinkDaoSvc.newEmpiLink().setPersonPid(myToPersonPid).setTargetPid(myFromPersonPid).setMatchResult(EmpiMatchResultEnum.POSSIBLE_DUPLICATE).setLinkSource(EmpiLinkSourceEnum.AUTO);
	saveLink(empiLink);
	// Removed the stale pre-refactor count assertions (assertEquals on
	// myEmpiLinkDao.count()) that were duplicated alongside these
	// block-scoped checks; the blocks below are the current assertions.
	{
		// Before the merge: exactly one link, and it is the POSSIBLE_DUPLICATE.
		List<EmpiLink> foundLinks = myEmpiLinkDao.findAll();
		assertEquals(1, foundLinks.size());
		assertEquals(EmpiMatchResultEnum.POSSIBLE_DUPLICATE, foundLinks.get(0).getMatchResult());
	}
	mergePersons();
	{
		// After the merge: the POSSIBLE_DUPLICATE is gone, replaced by a
		// single REDIRECT link recording the merge.
		List<EmpiLink> foundLinks = myEmpiLinkDao.findAll();
		assertEquals(1, foundLinks.size());
		assertEquals(EmpiMatchResultEnum.REDIRECT, foundLinks.get(0).getMatchResult());
	}
}
@Test
@ -145,7 +167,7 @@ public class EmpiPersonMergerSvcTest extends BaseEmpiR4Test {
createEmpiLink(myFromPerson, myTargetPatient1);
Person mergedPerson = mergePersons();
List<EmpiLink> links = myEmpiLinkDaoSvc.findEmpiLinksByPerson(mergedPerson);
List<EmpiLink> links = getNonRedirectLinksByPerson(mergedPerson);
assertEquals(1, links.size());
assertThat(mergedPerson, is(possibleLinkedTo(myTargetPatient1)));
assertEquals(1, myToPerson.getLink().size());
@ -156,7 +178,7 @@ public class EmpiPersonMergerSvcTest extends BaseEmpiR4Test {
createEmpiLink(myToPerson, myTargetPatient1);
Person mergedPerson = mergePersons();
List<EmpiLink> links = myEmpiLinkDaoSvc.findEmpiLinksByPerson(mergedPerson);
List<EmpiLink> links = getNonRedirectLinksByPerson(mergedPerson);
assertEquals(1, links.size());
assertThat(mergedPerson, is(possibleLinkedTo(myTargetPatient1)));
assertEquals(1, myToPerson.getLink().size());
@ -172,11 +194,17 @@ public class EmpiPersonMergerSvcTest extends BaseEmpiR4Test {
createEmpiLink(myToPerson, myTargetPatient1);
mergePersons();
List<EmpiLink> links = myEmpiLinkDaoSvc.findEmpiLinksByPerson(myToPerson);
List<EmpiLink> links = getNonRedirectLinksByPerson(myToPerson);
assertEquals(1, links.size());
assertEquals(EmpiLinkSourceEnum.MANUAL, links.get(0).getLinkSource());
}
private List<EmpiLink> getNonRedirectLinksByPerson(Person thePerson) {
	// All links for this person, with merge-produced REDIRECT links filtered out.
	List<EmpiLink> allLinks = myEmpiLinkDaoSvc.findEmpiLinksByPerson(thePerson);
	return allLinks.stream()
		.filter(empiLink -> !empiLink.isRedirect())
		.collect(Collectors.toList());
}
@Test
public void fromManualNoMatchLinkOverridesAutoToLink() {
EmpiLink fromLink = createEmpiLink(myFromPerson, myTargetPatient1);
@ -188,7 +216,7 @@ public class EmpiPersonMergerSvcTest extends BaseEmpiR4Test {
createEmpiLink(myToPerson, myTargetPatient1);
mergePersons();
List<EmpiLink> links = myEmpiLinkDaoSvc.findEmpiLinksByPerson(myToPerson);
List<EmpiLink> links = getNonRedirectLinksByPerson(myToPerson);
assertEquals(1, links.size());
assertEquals(EmpiLinkSourceEnum.MANUAL, links.get(0).getLinkSource());
}
@ -203,7 +231,7 @@ public class EmpiPersonMergerSvcTest extends BaseEmpiR4Test {
saveLink(toLink);
mergePersons();
List<EmpiLink> links = myEmpiLinkDaoSvc.findEmpiLinksByPerson(myToPerson);
List<EmpiLink> links = getNonRedirectLinksByPerson(myToPerson);
assertEquals(1, links.size());
assertEquals(EmpiLinkSourceEnum.MANUAL, links.get(0).getLinkSource());
}
@ -262,7 +290,7 @@ public class EmpiPersonMergerSvcTest extends BaseEmpiR4Test {
mergePersons();
assertEquals(1, myToPerson.getLink().size());
assertEquals(2, myEmpiLinkDao.count());
assertEquals(3, myEmpiLinkDao.count());
}
@Test

View File

@ -186,6 +186,13 @@ public class JpaConstants {
* Parameter for the $export operation
*/
public static final String PARAM_EXPORT_TYPE_FILTER = "_typeFilter";
/**
* Parameter for delete to indicate the deleted resources should also be expunged
*/
public static final String PARAM_DELETE_EXPUNGE = "_expunge";
/**
* URL for extension on a SearchParameter indicating that text values should not be indexed
*/

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.searchparam;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.model.api.IQueryParameterType;
@ -128,6 +129,8 @@ public class MatchUrlService {
} else if (Constants.PARAM_SOURCE.equals(nextParamName)) {
IQueryParameterAnd<?> param = ParameterUtil.parseQueryParams(myContext, RestSearchParameterTypeEnum.TOKEN, nextParamName, paramList);
paramMap.add(nextParamName, param);
} else if (JpaConstants.PARAM_DELETE_EXPUNGE.equals(nextParamName)) {
paramMap.setDeleteExpunge(true);
} else if (nextParamName.startsWith("_")) {
// ignore these since they aren't search params (e.g. _sort)
} else {

View File

@ -5,7 +5,11 @@ import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.model.api.IQueryParameterOr;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.SummaryEnum;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.QuantityParam;
@ -17,7 +21,16 @@ import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import java.io.Serializable;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.isBlank;
@ -64,6 +77,7 @@ public class SearchParameterMap implements Serializable {
private QuantityParam myNearDistanceParam;
private boolean myLastN;
private Integer myLastNMax;
private boolean myDeleteExpunge;
/**
* Constructor
@ -549,6 +563,15 @@ public class SearchParameterMap implements Serializable {
return myNearDistanceParam;
}
/**
 * @return whether this request was made with <code>_expunge=true</code>,
 * indicating the matched resources should be deleted and expunged rather
 * than merely searched. Set via {@code setDeleteExpunge(boolean)}.
 */
public boolean isDeleteExpunge() {
	return myDeleteExpunge;
}
/**
 * Marks this parameter map as a delete-expunge request (i.e. the URL
 * carried <code>_expunge=true</code>).
 *
 * @param theDeleteExpunge true to delete and expunge matched resources
 * @return this map, for call chaining
 */
public SearchParameterMap setDeleteExpunge(boolean theDeleteExpunge) {
	myDeleteExpunge = theDeleteExpunge;
	return this;
}
public enum EverythingModeEnum {
/*
* Don't reorder! We rely on the ordinals

View File

@ -45,6 +45,13 @@ public enum EmpiMatchResultEnum {
* Link between Person and Target pointing to the Golden Record for that Person
*/
GOLDEN_RECORD
GOLDEN_RECORD,
/**
* Link between two Person resources resulting from a merge. The Person points to the active person after the merge
* and the Target points to the inactive person after the merge.
*/
REDIRECT
// Stored in database as ORDINAL. Only add new values to bottom!
}

View File

@ -20,6 +20,8 @@ package ca.uhn.fhir.empi.api;
* #L%
*/
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
public interface IEmpiExpungeSvc {
/**
@ -27,15 +29,17 @@ public interface IEmpiExpungeSvc {
*
* @param theResourceType The type of resources
*
* @param theRequestDetails
* @return the count of deleted EMPI links
*/
long expungeAllEmpiLinksOfTargetType(String theResourceType);
long expungeAllEmpiLinksOfTargetType(String theResourceType, ServletRequestDetails theRequestDetails);
/**
* Delete all EMPI links, and their related Person objects.
*
*
* @return the count of deleted EMPI links
* @param theRequestDetails
*/
long removeAllEmpiLinks();
long expungeAllEmpiLinks(ServletRequestDetails theRequestDetails);
}

View File

@ -24,7 +24,8 @@ import ca.uhn.fhir.empi.rules.json.EmpiRulesJson;
public interface IEmpiSettings {
String EMPI_CHANNEL_NAME = "empi";
int EMPI_DEFAULT_CONCURRENT_CONSUMERS = 5;
// Parallel processing of EMPI can result in missed matches. Best to single-thread.
int EMPI_DEFAULT_CONCURRENT_CONSUMERS = 1;
boolean isEnabled();

View File

@ -160,12 +160,13 @@ public class EmpiProviderDstu3 extends BaseEmpiProvider {
@Operation(name = ProviderConstants.EMPI_CLEAR, returnParameters = {
@OperationParam(name = ProviderConstants.OPERATION_EMPI_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, type= DecimalType.class)
})
public Parameters clearEmpiLinks(@OperationParam(name=ProviderConstants.EMPI_CLEAR_TARGET_TYPE, min = 0, max = 1) StringType theTargetType) {
public Parameters clearEmpiLinks(@OperationParam(name=ProviderConstants.EMPI_CLEAR_TARGET_TYPE, min = 0, max = 1) StringType theTargetType,
ServletRequestDetails theRequestDetails) {
long resetCount;
if (theTargetType == null || StringUtils.isBlank(theTargetType.getValue())) {
resetCount = myEmpiResetSvc.removeAllEmpiLinks();
resetCount = myEmpiResetSvc.expungeAllEmpiLinks(theRequestDetails);
} else {
resetCount = myEmpiResetSvc.expungeAllEmpiLinksOfTargetType(theTargetType.getValueNotNull());
resetCount = myEmpiResetSvc.expungeAllEmpiLinksOfTargetType(theTargetType.getValueNotNull(), theRequestDetails);
}
Parameters parameters = new Parameters();
parameters.addParameter().setName(ProviderConstants.OPERATION_EMPI_CLEAR_OUT_PARAM_DELETED_COUNT)

View File

@ -116,12 +116,13 @@ public class EmpiProviderR4 extends BaseEmpiProvider {
@Operation(name = ProviderConstants.EMPI_CLEAR, returnParameters = {
@OperationParam(name = ProviderConstants.OPERATION_EMPI_BATCH_RUN_OUT_PARAM_SUBMIT_COUNT, type=DecimalType.class)
})
public Parameters clearEmpiLinks(@OperationParam(name=ProviderConstants.EMPI_CLEAR_TARGET_TYPE, min = 0, max = 1) StringType theTargetType) {
public Parameters clearEmpiLinks(@OperationParam(name=ProviderConstants.EMPI_CLEAR_TARGET_TYPE, min = 0, max = 1) StringType theTargetType,
ServletRequestDetails theRequestDetails) {
long resetCount;
if (theTargetType == null || StringUtils.isBlank(theTargetType.getValue())) {
resetCount = myEmpiExpungeSvc.removeAllEmpiLinks();
resetCount = myEmpiExpungeSvc.expungeAllEmpiLinks(theRequestDetails);
} else {
resetCount = myEmpiExpungeSvc.expungeAllEmpiLinksOfTargetType(theTargetType.getValueNotNull());
resetCount = myEmpiExpungeSvc.expungeAllEmpiLinksOfTargetType(theTargetType.getValueNotNull(), theRequestDetails);
}
Parameters parameters = new Parameters();
parameters.addParameter().setName(ProviderConstants.OPERATION_EMPI_CLEAR_OUT_PARAM_DELETED_COUNT)

View File

@ -60,6 +60,7 @@ public final class AssuranceLevelUtil {
private static CanonicalIdentityAssuranceLevel getAssuranceFromManualResult(EmpiMatchResultEnum theMatchResult) {
switch (theMatchResult) {
case MATCH:
case REDIRECT:
return CanonicalIdentityAssuranceLevel.LEVEL3;
case NO_MATCH:
case POSSIBLE_DUPLICATE:

View File

@ -346,6 +346,11 @@ public class AuthorizationInterceptor implements IRuleApplier {
checkPointcutAndFailIfDeny(theRequestDetails, thePointcut, theResourceToDelete);
}
/**
 * Invoked at the {@code STORAGE_PRE_DELETE_EXPUNGE} pointcut, before a
 * delete-expunge (delete with <code>_expunge=true</code>) is carried out.
 * Applies the configured authorization rules and fails the request if any
 * rule denies it. No individual resource or ID is available at this
 * pointcut, so nulls are passed for those arguments.
 */
@Hook(Pointcut.STORAGE_PRE_DELETE_EXPUNGE)
public void hookDeleteExpunge(RequestDetails theRequestDetails, Pointcut thePointcut) {
	applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, null, null, null, thePointcut);
}
private void checkPointcutAndFailIfDeny(RequestDetails theRequestDetails, Pointcut thePointcut, @Nonnull IBaseResource theInputResource) {
applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, theInputResource, theInputResource.getIdElement(), null, thePointcut);
}

View File

@ -30,4 +30,9 @@ public interface IAuthRuleBuilderRuleOpDelete extends IAuthRuleBuilderRuleOp {
*/
IAuthRuleBuilderRuleOp onCascade();
/**
* Specifies that this rule applies to delete expunges as opposed to regular
* deletes. A delete expunge is a delete operation called with the _expunge=true parameter.
*/
IAuthRuleBuilderRuleOp onExpunge();
}

View File

@ -368,6 +368,7 @@ public class RuleBuilder implements IAuthRuleBuilder {
private final RuleOpEnum myRuleOp;
private RuleBuilderRuleOpClassifier myInstancesBuilder;
private boolean myOnCascade;
private boolean myOnExpunge;
RuleBuilderRuleOp(RuleOpEnum theRuleOp) {
myRuleOp = theRuleOp;
@ -428,6 +429,12 @@ public class RuleBuilder implements IAuthRuleBuilder {
return this;
}
/**
 * Marks this delete rule as applying to delete-expunge operations (a delete
 * invoked with the <code>_expunge=true</code> parameter) instead of regular
 * deletes. The flag is carried into the built rule when this builder finishes.
 *
 * @return this builder, for call chaining
 */
@Override
public IAuthRuleBuilderRuleOp onExpunge() {
	myOnExpunge = true;
	return this;
}
private class RuleBuilderRuleOpClassifier implements IAuthRuleBuilderRuleOpClassifier {
private final AppliesTypeEnum myAppliesTo;
@ -468,6 +475,7 @@ public class RuleBuilder implements IAuthRuleBuilder {
myRule.setClassifierCompartmentName(myInCompartmentName);
myRule.setClassifierCompartmentOwners(myInCompartmentOwners);
myRule.setAppliesToDeleteCascade(myOnCascade);
myRule.setAppliesToDeleteExpunge(myOnExpunge);
myRules.add(myRule);
return new RuleBuilderFinished(myRule);

View File

@ -67,6 +67,7 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
private TransactionAppliesToEnum myTransactionAppliesToOp;
private Collection<IIdType> myAppliesToInstances;
private boolean myAppliesToDeleteCascade;
private boolean myAppliesToDeleteExpunge;
/**
* Constructor
@ -229,6 +230,9 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
break;
case DELETE:
if (theOperation == RestOperationTypeEnum.DELETE) {
if (thePointcut == Pointcut.STORAGE_PRE_DELETE_EXPUNGE && myAppliesToDeleteExpunge) {
return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource);
}
if (myAppliesToDeleteCascade != (thePointcut == Pointcut.STORAGE_CASCADE_DELETE)) {
return null;
}
@ -623,4 +627,7 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
myAppliesToDeleteCascade = theAppliesToDeleteCascade;
}
/**
 * Package-private setter used by the rule builder: when true, this DELETE
 * rule matches delete-expunge operations (checked at the
 * STORAGE_PRE_DELETE_EXPUNGE pointcut) rather than regular deletes.
 */
void setAppliesToDeleteExpunge(boolean theAppliesToDeleteExpunge) {
	myAppliesToDeleteExpunge = theAppliesToDeleteExpunge;
}
}

View File

@ -31,20 +31,29 @@ import org.apache.commons.io.input.ReaderInputStream;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.ProtocolVersion;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicStatusLine;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.Binary;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Bundle.BundleType;
import org.hl7.fhir.dstu3.model.CapabilityStatement;
import org.hl7.fhir.dstu3.model.Device;
import org.hl7.fhir.dstu3.model.Encounter;
import org.hl7.fhir.dstu3.model.EpisodeOfCare;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.dstu3.model.OperationOutcome;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.Patient;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.internal.stubbing.defaultanswers.ReturnsDeepStubs;
import org.mockito.invocation.InvocationOnMock;
@ -53,7 +62,6 @@ import org.mockito.stubbing.Answer;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;