Enabling massIngestionMode causes incomplete resource deletion (#4476)

* Adding initial test.

* Adding fix and subsequent test.

* Adding changelog.

---------

Co-authored-by: peartree <etienne.poirier@smilecdr.com>
This commit is contained in:
Etienne Poirier 2023-01-30 07:34:50 -05:00 committed by GitHub
parent 078dcd9c0d
commit 1a9b749b5a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 68 additions and 12 deletions

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 4475
jira: SMILE-4961
title: "Enabling mass ingestion mode alters the resource deletion process, leaving resources partially deleted. The problem has been fixed."

View File

@ -143,7 +143,7 @@ import java.util.Set;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME; import static java.util.Objects.nonNull;
import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
@ -601,7 +601,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
} else { } else {
theEntity.setHashSha256(null); if(nonNull(theEntity.getHashSha256())){
theEntity.setHashSha256(null);
changed = true;
}
resourceBinary = null; resourceBinary = null;
resourceText = null; resourceText = null;
encoding = ResourceEncodingEnum.DEL; encoding = ResourceEncodingEnum.DEL;
@ -939,7 +943,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
entity.setUpdated(theDeletedTimestampOrNull); entity.setUpdated(theDeletedTimestampOrNull);
entity.setNarrativeText(null); entity.setNarrativeText(null);
entity.setContentText(null); entity.setContentText(null);
entity.setHashSha256(null);
entity.setIndexStatus(INDEX_STATUS_INDEXED); entity.setIndexStatus(INDEX_STATUS_INDEXED);
changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true); changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true);

View File

@ -2895,9 +2895,28 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
} }
private void printQueryCount(String theMessage){ @Test
public void testDeleteResource_WithMassIngestionMode_enabled(){
ourLog.info("QueryCount {} is: ", theMessage); myDaoConfig.setMassIngestionMode(true);
// given
Observation observation = new Observation()
.setStatus(Observation.ObservationStatus.FINAL)
.addCategory(new CodeableConcept().addCoding(new Coding("http://category-type", "12345", null)))
.setCode(new CodeableConcept().addCoding(new Coding("http://coverage-type", "12345", null)));
IIdType idDt = myObservationDao.create(observation, mySrd).getEntity().getIdDt();
// when
myCaptureQueriesListener.clear();
myObservationDao.delete(idDt, mySrd);
// then
assertQueryCount(3,1,1, 1);
}
private void printQueryCount(){
ourLog.info("\tselect: {}", myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()); ourLog.info("\tselect: {}", myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
ourLog.info("\tupdate: {}", myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size()); ourLog.info("\tupdate: {}", myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
ourLog.info("\tinsert: {}", myCaptureQueriesListener.getInsertQueriesForCurrentThread().size()); ourLog.info("\tinsert: {}", myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
@ -2905,16 +2924,15 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
} }
private void assertQueryCount(int theExpectedSelect, int theExpectedUpdate, int theExpectedInsert, int theExpectedDelete){ private void assertQueryCount(int theExpectedSelectCount, int theExpectedUpdateCount, int theExpectedInsertCount, int theExpectedDeleteCount){
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(theExpectedSelect, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()); assertEquals(theExpectedSelectCount, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(theExpectedUpdate, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size()); assertEquals(theExpectedUpdateCount, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueriesForCurrentThread(); myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(theExpectedInsert, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size()); assertEquals(theExpectedInsertCount, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread(); myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(theExpectedDelete, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size()); assertEquals(theExpectedDeleteCount, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
} }
private Group createGroup(List<IIdType> theIIdTypeList) { private Group createGroup(List<IIdType> theIIdTypeList) {

View File

@ -175,6 +175,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
myDaoConfig.setInternalSynchronousSearchSize(new DaoConfig().getInternalSynchronousSearchSize()); myDaoConfig.setInternalSynchronousSearchSize(new DaoConfig().getInternalSynchronousSearchSize());
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED); myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
myDaoConfig.setHistoryCountMode(DaoConfig.DEFAULT_HISTORY_COUNT_MODE); myDaoConfig.setHistoryCountMode(DaoConfig.DEFAULT_HISTORY_COUNT_MODE);
myDaoConfig.setMassIngestionMode(false);
} }
private void assertGone(IIdType theId) { private void assertGone(IIdType theId) {
@ -2498,6 +2499,35 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
} }
@Test
public void testDeleteWithMassInjectionModeEnabled(){
myDaoConfig.setMassIngestionMode(true);
// given
Observation observation = new Observation()
.setStatus(ObservationStatus.FINAL)
.addCategory(newCodeableConcept("http://somesystem","somecode"))
.setCode(newCodeableConcept("http://loinc.org", "15074-8"));
// when
IIdType idDt = myObservationDao.create(observation, mySrd).getEntity().getIdDt();
myObservationDao.delete(idDt, mySrd);
// then
runInTransaction(() -> {
Long obsertionId = idDt.getIdPartAsLong();
Long resourceCurrentVersion = myResourceTableDao.findCurrentVersionByPid(obsertionId);
int resourceVersionCount = myResourceHistoryTableDao.findAllVersionsForResourceIdInOrder(obsertionId).size();
int indexedTokenCount = myResourceIndexedSearchParamTokenDao.countForResourceId(obsertionId);
assertThat(resourceCurrentVersion, equalTo(2L));
assertThat(resourceVersionCount, equalTo(2));
assertThat(indexedTokenCount, equalTo(0));
});
}
@Test @Test
public void testPersistContactPoint() { public void testPersistContactPoint() {
List<IAnyResource> found = toList(myPatientDao.search(new SearchParameterMap(Patient.SP_TELECOM, new TokenParam(null, "555-123-4567")).setLoadSynchronous(true))); List<IAnyResource> found = toList(myPatientDao.search(new SearchParameterMap(Patient.SP_TELECOM, new TokenParam(null, "555-123-4567")).setLoadSynchronous(true)));