Merge pull request #2663 from hapifhir/issue-2661-bad-error-during-delete-with-expunge

Issue 2661 bad error during delete with expunge
This commit is contained in:
Tadgh 2021-05-18 13:50:50 -04:00 committed by GitHub
commit ec565c6b24
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 57 additions and 5 deletions

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 2661
title: "Fixed a bug where delete with expunge would throw a LazyInitializationException if there were delete conflicts and
the expunge batch size was smaller than the number of resources to be deleted."

View File

@ -28,7 +28,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.rest.api.server.RequestDetails;
@ -74,6 +74,8 @@ public class DeleteExpungeService {
private IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private IdHelperService myIdHelper;
public DeleteMethodOutcome expungeByResourcePids(String theUrl, String theResourceName, Slice<Long> thePids, RequestDetails theRequest) {
StopWatch w = new StopWatch();
@ -132,13 +134,18 @@ public class DeleteExpungeService {
}
ResourceLink firstConflict = conflictResourceLinks.get(0);
String sourceResourceId = firstConflict.getSourceResource().getIdDt().toVersionless().getValue();
String targetResourceId = firstConflict.getTargetResource().getIdDt().toVersionless().getValue();
//NB-GGG: We previously instantiated these ID values from firstConflict.getSourceResource().getIdDt(), but in a situation where we
//actually had to run delete conflict checks in multiple partitions, the executor service starts its own sessions on a per thread basis, and by the time
//we arrive here, those sessions are closed. So instead, we resolve them from PIDs, which are eagerly loaded.
String sourceResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getSourceResourcePid()).toVersionless().getValue();
String targetResourceId = myIdHelper.resourceIdFromPidOrThrowException(firstConflict.getTargetResourcePid()).toVersionless().getValue();
throw new InvalidRequestException("DELETE with _expunge=true failed. Unable to delete " +
targetResourceId + " because " + sourceResourceId + " refers to it via the path " + firstConflict.getSourcePath());
}
private void findResourceLinksWithTargetPidIn(List<Long> theAllTargetPids, List<Long> theSomeTargetPids, List<ResourceLink> theConflictResourceLinks) {
public void findResourceLinksWithTargetPidIn(List<Long> theAllTargetPids, List<Long> theSomeTargetPids, List<ResourceLink> theConflictResourceLinks) {
// We only need to find one conflict, so if we found one already in an earlier partition run, we can skip the rest of the searches
if (theConflictResourceLinks.isEmpty()) {
List<ResourceLink> conflictResourceLinks = myResourceLinkDao.findWithTargetPidIn(theSomeTargetPids).stream()

View File

@ -30,6 +30,8 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Slice;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;

View File

@ -4,10 +4,15 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Claim;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
@ -18,7 +23,9 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import static org.hamcrest.MatcherAssert.assertThat;
@ -65,6 +72,36 @@ class DeleteExpungeServiceTest extends BaseJpaR4Test {
assertEquals(e.getMessage(), "DELETE with _expunge=true failed. Unable to delete " + organizationId.toVersionless() + " because " + patientId.toVersionless() + " refers to it via the path Patient.managingOrganization");
}
}
// Regression test for https://github.com/hapifhir/hapi-fhir/issues/2661: when the expunge batch
// size is smaller than the number of resources to delete, conflict detection runs across multiple
// partitions/threads. The fix resolves conflict resource IDs from eagerly-loaded PIDs, so this
// must surface a clean InvalidRequestException rather than a LazyInitializationException thrown
// from a closed per-thread Hibernate session.
@Test
public void testDeleteWithExpungeFailsIfConflictsAreGeneratedByMultiplePartitions() {
//See https://github.com/hapifhir/hapi-fhir/issues/2661
//Given
BundleBuilder builder = new BundleBuilder(myFhirCtx);
// Create 20 Organization/Patient pairs; each Patient references its Organization via
// managingOrganization, so deleting the Organizations produces referential-integrity conflicts.
for (int i = 0; i < 20; i++) {
Organization o = new Organization();
o.setId("Organization/O-" + i);
Patient p = new Patient();
p.setId("Patient/P-" + i);
p.setManagingOrganization(new Reference(o.getId()));
builder.addTransactionUpdateEntry(o);
builder.addTransactionUpdateEntry(p);
}
// Persist everything in one transaction bundle.
mySystemDao.transaction(new SystemRequestDetails(), (Bundle) builder.getBundle());
//When
// Batch size (10) < resource count (20) forces the delete-with-expunge to split the work into
// multiple partitions, reproducing the multi-session scenario from the issue.
myDaoConfig.setExpungeBatchSize(10);
try {
myOrganizationDao.deleteByUrl("Organization?" + JpaConstants.PARAM_DELETE_EXPUNGE + "=true", mySrd);
fail();
} catch (InvalidRequestException e) {
//Then
// Expect the well-formed delete-conflict message, not a LazyInitializationException.
assertThat(e.getMessage(), is(containsString("DELETE with _expunge=true failed. Unable to delete ")));
}
}
@Test
public void testDeleteExpungeRespectsSynchronousSize() {
//Given

View File

@ -72,7 +72,7 @@ public class JpaFhirRetrieveProvider extends SearchParamFhirRetrieveProvider {
protected Collection<Object> executeQuery(String dataType, SearchParameterMap map) {
// TODO: Once HAPI breaks this out from the server dependencies
// we can include it on its own.
ca.uhn.fhir.jpa.searchparam.SearchParameterMap hapiMap = new ca.uhn.fhir.jpa.searchparam.SearchParameterMap();
ca.uhn.fhir.jpa.searchparam.SearchParameterMap hapiMap = ca.uhn.fhir.jpa.searchparam.SearchParameterMap.newSynchronous();
try {
Method[] methods = hapiMap.getClass().getDeclaredMethods();